From 48b60c85f0641fe92b116648b55e01b1e0f8d860 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:24:35 +0100 Subject: [PATCH 01/75] feat: Workflow refactoring, only async --- keep/api/core/db.py | 124 +++++++------- keep/api/core/db_utils.py | 14 +- keep/api/routes/workflows.py | 85 ++++------ keep/contextmanager/contextmanager.py | 11 +- keep/parser/parser.py | 16 +- keep/providers/base/base_provider.py | 34 +++- .../clickhouse_http_provider/__init__.py | 0 .../clickhouse_http_provider.py | 154 ++++++++++++++++++ keep/step/step.py | 16 +- keep/workflowmanager/workflow.py | 20 +-- keep/workflowmanager/workflowmanager.py | 35 ++-- keep/workflowmanager/workflowscheduler.py | 101 +++++------- keep/workflowmanager/workflowstore.py | 24 +-- 13 files changed, 393 insertions(+), 241 deletions(-) create mode 100644 keep/providers/clickhouse_http_provider/__init__.py create mode 100644 keep/providers/clickhouse_http_provider/clickhouse_http_provider.py diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 8949b6586..cfaff3c30 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -39,6 +39,7 @@ from sqlalchemy.exc import IntegrityError, OperationalError from sqlalchemy.orm import joinedload, selectinload, subqueryload from sqlalchemy.sql import exists, expression +from sqlmodel.ext.asyncio.session import AsyncSession from sqlmodel import Session, SQLModel, col, or_, select, text from keep.api.consts import STATIC_PRESETS @@ -82,6 +83,8 @@ engine = create_db_engine() +engine_async = create_db_engine(_async=True) + SQLAlchemyInstrumentor().instrument(enable_commenter=True, engine=engine) @@ -145,7 +148,7 @@ def __convert_to_uuid(value: str) -> UUID | None: return None -def create_workflow_execution( +async def create_workflow_execution( workflow_id: str, tenant_id: str, triggered_by: str, @@ -155,7 +158,7 @@ def create_workflow_execution( execution_id: str = None, event_type: str = "alert", ) -> str: - with Session(engine) as session: + async with AsyncSession(engine_async) as session: try: if len(triggered_by) > 255: triggered_by = triggered_by[:255] @@ -170,7 +173,7 @@ def create_workflow_execution( ) session.add(workflow_execution) # Ensure the object has an id - session.flush() + await session.flush() execution_id = workflow_execution.id if KEEP_AUDIT_EVENTS_ENABLED: if fingerprint and event_type == "alert": @@ -188,10 +191,10 @@ def create_workflow_execution( ) session.add(workflow_to_incident_execution) - session.commit() + await session.commit() return execution_id except IntegrityError: - session.rollback() + await session.rollback() logger.debug( f"Failed to create a new execution for workflow {workflow_id}. Constraint is met." ) @@ -226,12 +229,12 @@ def get_last_completed_execution( ).first() -def get_workflows_that_should_run(): - with Session(engine) as session: +async def get_workflows_that_should_run(): + async with AsyncSession(engine_async) as session: logger.debug("Checking for workflows that should run") workflows_with_interval = [] try: - result = session.exec( + result = await session.exec( select(Workflow) .filter(Workflow.is_deleted == False) .filter(Workflow.is_disabled == False) @@ -252,7 +255,7 @@ def get_workflows_that_should_run(): if not last_execution: try: # try to get the lock - workflow_execution_id = create_workflow_execution( + workflow_execution_id = await create_workflow_execution( workflow.id, workflow.tenant_id, "scheduler" ) # we succeed to get the lock on this execution number :) @@ -274,7 +277,7 @@ def get_workflows_that_should_run(): ): try: # try to get the lock with execution_number + 1 - workflow_execution_id = create_workflow_execution( + workflow_execution_id = await create_workflow_execution( workflow.id, workflow.tenant_id, "scheduler", @@ -294,10 +297,10 @@ def get_workflows_that_should_run(): # some other thread/instance has already started to work on it except IntegrityError: # we need to verify the locking is still valid and not timeouted - session.rollback() + await session.rollback() pass # get the ongoing execution - ongoing_execution = session.exec( + ongoing_execution = await session.exec( select(WorkflowExecution) .where(WorkflowExecution.workflow_id == workflow.id) .where( @@ -319,10 +322,10 @@ def get_workflows_that_should_run(): # if the ongoing execution runs more than 60 minutes, than its timeout elif ongoing_execution.started + timedelta(minutes=60) <= current_time: ongoing_execution.status = "timeout" - session.commit() + await session.commit() # re-create the execution and try to get the lock try: - workflow_execution_id = create_workflow_execution( + workflow_execution_id = await create_workflow_execution( workflow.id, workflow.tenant_id, "scheduler", @@ -479,22 +482,24 @@ def get_last_workflow_workflow_to_alert_executions( return latest_workflow_to_alert_executions -def get_last_workflow_execution_by_workflow_id( +async def get_last_workflow_execution_by_workflow_id( tenant_id: str, workflow_id: str ) -> Optional[WorkflowExecution]: - with Session(engine) as session: + async with AsyncSession(engine_async) as session: + q = select(WorkflowExecution).filter( + WorkflowExecution.workflow_id == workflow_id + ).filter(WorkflowExecution.tenant_id == tenant_id).filter( + WorkflowExecution.started >= datetime.now() - timedelta(days=7) + ).filter(WorkflowExecution.status == "success").order_by( + WorkflowExecution.started.desc() + ) workflow_execution = ( - session.query(WorkflowExecution) - .filter(WorkflowExecution.workflow_id == workflow_id) - .filter(WorkflowExecution.tenant_id == tenant_id) - .filter(WorkflowExecution.started >= datetime.now() - timedelta(days=1)) - .filter(WorkflowExecution.status == "success") - .order_by(WorkflowExecution.started.desc()) - .first() + (await session.exec(q)).first() ) return workflow_execution + def get_workflows_with_last_execution(tenant_id: str) -> List[dict]: with Session(engine) as session: latest_execution_cte = ( @@ -579,30 +584,32 @@ def get_all_workflows_yamls(tenant_id: str) -> List[str]: return workflows -def get_workflow(tenant_id: str, workflow_id: str) -> Workflow: - with Session(engine) as session: +async def get_workflow(tenant_id: str, workflow_id: str) -> Workflow: + async with AsyncSession(engine_async) as session: # if the workflow id is uuid: if validators.uuid(workflow_id): - workflow = session.exec( + workflow = await session.exec( select(Workflow) .where(Workflow.tenant_id == tenant_id) .where(Workflow.id == workflow_id) .where(Workflow.is_deleted == False) - ).first() + ) + workflow = workflow.first() else: - workflow = session.exec( + workflow = await session.exec( select(Workflow) .where(Workflow.tenant_id == tenant_id) .where(Workflow.name == workflow_id) .where(Workflow.is_deleted == False) - ).first() + ) + workflow = workflow.first() if not workflow: return None return workflow -def get_raw_workflow(tenant_id: str, workflow_id: str) -> str: - workflow = get_workflow(tenant_id, workflow_id) +async def get_raw_workflow(tenant_id: str, workflow_id: str) -> str: + workflow = await get_workflow(tenant_id, workflow_id) if not workflow: return None return workflow.workflow_raw @@ -650,33 +657,31 @@ def get_consumer_providers() -> List[Provider]: return providers -def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, error): - with Session(engine) as session: - workflow_execution = session.exec( - select(WorkflowExecution).where(WorkflowExecution.id == execution_id) - ).first() +async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, error): + async with AsyncSession(engine_async) as session: + workflow_execution = (await session.exec( + select(WorkflowExecution) + .where(WorkflowExecution.tenant_id == tenant_id) + .where(WorkflowExecution.workflow_id == workflow_id) + .where(WorkflowExecution.id == execution_id) + )).first() # some random number to avoid collisions if not workflow_execution: logger.warning( - f"Failed to finish workflow execution {execution_id} for workflow {workflow_id}. Execution not found.", - extra={ - "tenant_id": tenant_id, - "workflow_id": workflow_id, - "execution_id": execution_id, - }, + f"Failed to finish workflow execution {execution_id} for workflow {workflow_id}. Execution not found." ) raise ValueError("Execution not found") workflow_execution.is_running = random.randint(1, 2147483647 - 1) # max int workflow_execution.status = status # TODO: we had a bug with the error field, it was too short so some customers may fail over it. # we need to fix it in the future, create a migration that increases the size of the error field - # and then we can remove the [:511] from here - workflow_execution.error = error[:511] if error else None + # and then we can remove the [:255] from here + workflow_execution.error = error[:255] if error else None workflow_execution.execution_time = ( datetime.utcnow() - workflow_execution.started ).total_seconds() # TODO: logs - session.commit() + await session.commit() def get_workflow_executions( @@ -784,14 +789,14 @@ def delete_workflow_by_provisioned_file(tenant_id, provisioned_file): session.commit() -def get_workflow_id(tenant_id, workflow_name): - with Session(engine) as session: - workflow = session.exec( +async def get_workflow_id(tenant_id, workflow_name): + async with AsyncSession(engine_async) as session: + workflow = (await session.exec( select(Workflow) .where(Workflow.tenant_id == tenant_id) .where(Workflow.name == workflow_name) .where(Workflow.is_deleted == False) - ).first() + )).first() if workflow: return workflow.id @@ -1602,16 +1607,16 @@ def update_user_role(tenant_id, username, role): return user -def save_workflow_results(tenant_id, workflow_execution_id, workflow_results): - with Session(engine) as session: - workflow_execution = session.exec( +async def save_workflow_results(tenant_id, workflow_execution_id, workflow_results): + async with AsyncSession(engine_async) as session: + workflow_execution = (await session.exec( select(WorkflowExecution) .where(WorkflowExecution.tenant_id == tenant_id) .where(WorkflowExecution.id == workflow_execution_id) - ).one() + )).one() workflow_execution.results = workflow_results - session.commit() + await session.commit() def get_workflow_by_name(tenant_id, workflow_name): @@ -1625,10 +1630,10 @@ def get_workflow_by_name(tenant_id, workflow_name): return workflow - -def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id): - with Session(engine) as session: - previous_execution = session.exec( + +async def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id): + async with AsyncSession(engine_async) as session: + previous_execution = (await session.exec( select(WorkflowExecution) .where(WorkflowExecution.tenant_id == tenant_id) .where(WorkflowExecution.workflow_id == workflow_id) @@ -1638,13 +1643,14 @@ def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id): ) # no need to check more than 1 day ago .order_by(WorkflowExecution.started.desc()) .limit(1) - ).first() + )).first() if previous_execution: return previous_execution else: return None + def create_rule( tenant_id, name, diff --git a/keep/api/core/db_utils.py b/keep/api/core/db_utils.py index 97ffa3100..cbc6c61d9 100644 --- a/keep/api/core/db_utils.py +++ b/keep/api/core/db_utils.py @@ -15,6 +15,7 @@ from sqlalchemy.ext.compiler import compiles from sqlalchemy.sql.ddl import CreateColumn from sqlalchemy.sql.functions import GenericFunction +from sqlalchemy.ext.asyncio import create_async_engine from sqlmodel import Session, create_engine # This import is required to create the tables @@ -124,12 +125,13 @@ def dumps(_json) -> str: return json.dumps(_json, default=str) -def create_db_engine(): +def create_db_engine(_async=False): """ Creates a database engine based on the environment variables. """ + creator_method = create_engine if not _async else create_async_engine if RUNNING_IN_CLOUD_RUN and not KEEP_FORCE_CONNECTION_STRING: - engine = create_engine( + engine = creator_method( "mysql+pymysql://", creator=__get_conn, echo=DB_ECHO, @@ -138,7 +140,7 @@ def create_db_engine(): max_overflow=DB_MAX_OVERFLOW, ) elif DB_CONNECTION_STRING == "impersonate": - engine = create_engine( + engine = creator_method( "mysql+pymysql://", creator=__get_conn_impersonate, echo=DB_ECHO, @@ -147,7 +149,7 @@ def create_db_engine(): elif DB_CONNECTION_STRING: try: logger.info(f"Creating a connection pool with size {DB_POOL_SIZE}") - engine = create_engine( + engine = creator_method( DB_CONNECTION_STRING, pool_size=DB_POOL_SIZE, max_overflow=DB_MAX_OVERFLOW, @@ -157,11 +159,11 @@ def create_db_engine(): ) # SQLite does not support pool_size except TypeError: - engine = create_engine( + engine = creator_method( DB_CONNECTION_STRING, json_serializer=dumps, echo=DB_ECHO ) else: - engine = create_engine( + engine = creator_method( "sqlite:///./keep.db", connect_args={"check_same_thread": False}, echo=DB_ECHO, diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py index 7a5912996..863bbcf10 100644 --- a/keep/api/routes/workflows.py +++ b/keep/api/routes/workflows.py @@ -168,10 +168,8 @@ def export_workflows( "/{workflow_id}/run", description="Run a workflow", ) -def run_workflow( +async def run_workflow( workflow_id: str, - event_type: Optional[str] = Query(None), - event_id: Optional[str] = Query(None), body: Optional[Dict[Any, Any]] = Body(None), authenticated_entity: AuthenticatedEntity = Depends( IdentityManagerFactory.get_auth_verifier(["write:workflows"]) @@ -185,55 +183,41 @@ def run_workflow( if not validators.uuid(workflow_id): logger.info("Workflow ID is not a UUID, trying to get the ID by name") workflow_id = getattr(get_workflow_by_name(tenant_id, workflow_id), "id", None) - workflowmanager = WorkflowManager.get_instance() + # Finally, run it try: - # Handle replay from query parameters - if event_type and event_id: - if event_type == "alert": - # Fetch alert from your alert store - alert_db = get_alert_by_event_id(tenant_id, event_id) - event = convert_db_alerts_to_dto_alerts([alert_db])[0] - elif event_type == "incident": - # SHAHAR: TODO - raise NotImplementedError("Incident replay is not supported yet") - else: - raise HTTPException( - status_code=400, - detail=f"Invalid event type: {event_type}", - ) + + if body.get("type", "alert") == "alert": + event_class = AlertDto else: - # Handle regular run from body - event_body = body.get("body", {}) or body - event_class = ( - AlertDto if body.get("type", "alert") == "alert" else IncidentDto + event_class = IncidentDto + + event_body = body.get("body", {}) or body + + # if its event that was triggered by the UI with the Modal + fingerprint = event_body.get("fingerprint", "") + if (fingerprint and "test-workflow" in fingerprint) or not body: + # some random + event_body["id"] = event_body.get("fingerprint", "manual-run") + event_body["name"] = event_body.get("fingerprint", "manual-run") + event_body["lastReceived"] = datetime.datetime.now( + tz=datetime.timezone.utc + ).isoformat() + if "source" in event_body and not isinstance(event_body["source"], list): + event_body["source"] = [event_body["source"]] + try: + event = event_class(**event_body) + except TypeError: + raise HTTPException( + status_code=400, + detail="Invalid event format", ) - # Handle UI triggered events - fingerprint = event_body.get("fingerprint", "") - if (fingerprint and "test-workflow" in fingerprint) or not body: - event_body["id"] = event_body.get("fingerprint", "manual-run") - event_body["name"] = event_body.get("fingerprint", "manual-run") - event_body["lastReceived"] = datetime.datetime.now( - tz=datetime.timezone.utc - ).isoformat() - if "source" in event_body and not isinstance( - event_body["source"], list - ): - event_body["source"] = [event_body["source"]] - - try: - event = event_class(**event_body) - except TypeError: - raise HTTPException( - status_code=400, - detail="Invalid event format", - ) - - workflow_execution_id = workflowmanager.scheduler.handle_manual_event_workflow( + workflow_execution_id = await workflowmanager.scheduler.handle_manual_event_workflow( workflow_id, tenant_id, created_by, event ) + except Exception as e: logger.exception( "Failed to run workflow", @@ -243,7 +227,6 @@ def run_workflow( status_code=500, detail=f"Failed to run workflow {workflow_id}: {e}", ) - logger.info( "Workflow ran successfully", extra={ @@ -528,7 +511,7 @@ async def update_workflow_by_id( @router.get("/{workflow_id}/raw", description="Get workflow executions by ID") -def get_raw_workflow_by_id( +async def get_raw_workflow_by_id( workflow_id: str, authenticated_entity: AuthenticatedEntity = Depends( IdentityManagerFactory.get_auth_verifier(["read:workflows"]) @@ -539,7 +522,7 @@ def get_raw_workflow_by_id( return JSONResponse( status_code=200, content={ - "workflow_raw": workflowstore.get_raw_workflow( + "workflow_raw": await workflowstore.get_raw_workflow( tenant_id=tenant_id, workflow_id=workflow_id ) }, @@ -547,7 +530,7 @@ def get_raw_workflow_by_id( @router.get("/{workflow_id}", description="Get workflow by ID") -def get_workflow_by_id( +async def get_workflow_by_id( workflow_id: str, authenticated_entity: AuthenticatedEntity = Depends( IdentityManagerFactory.get_auth_verifier(["read:workflows"]) @@ -555,7 +538,7 @@ def get_workflow_by_id( ): tenant_id = authenticated_entity.tenant_id # get all workflow - workflow = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id) + workflow = await get_workflow(tenant_id=tenant_id, workflow_id=workflow_id) if not workflow: logger.warning( @@ -590,7 +573,7 @@ def get_workflow_by_id( @router.get("/{workflow_id}/runs", description="Get workflow executions by ID") -def get_workflow_runs_by_id( +async def get_workflow_runs_by_id( workflow_id: str, tab: int = 1, limit: int = 25, @@ -603,7 +586,7 @@ def get_workflow_runs_by_id( ), ) -> WorkflowExecutionsPaginatedResultsDto: tenant_id = authenticated_entity.tenant_id - workflow = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id) + workflow = await get_workflow(tenant_id=tenant_id, workflow_id=workflow_id) installed_providers = get_installed_providers(tenant_id) installed_providers_by_type = {} for installed_provider in installed_providers: diff --git a/keep/contextmanager/contextmanager.py b/keep/contextmanager/contextmanager.py index 95b51cb8f..cf2fdc85b 100644 --- a/keep/contextmanager/contextmanager.py +++ b/keep/contextmanager/contextmanager.py @@ -1,4 +1,5 @@ # TODO - refactor context manager to support multitenancy in a more robust way +import asyncio import logging import click @@ -53,11 +54,9 @@ def __init__( "last_workflow_results" in workflow_str ) if last_workflow_results_in_workflow: - last_workflow_execution = ( - get_last_workflow_execution_by_workflow_id( - tenant_id, workflow_id - ) - ) + last_workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + tenant_id, workflow_id + )) if last_workflow_execution is not None: self.last_workflow_execution_results = ( last_workflow_execution.results @@ -251,7 +250,7 @@ def set_step_vars(self, step_id, _vars): self.current_step_vars = _vars self.steps_context[step_id]["vars"] = _vars - def get_last_workflow_run(self, workflow_id): + async def get_last_workflow_run(self, workflow_id): return get_last_workflow_execution_by_workflow_id(self.tenant_id, workflow_id) def dump(self): diff --git a/keep/parser/parser.py b/keep/parser/parser.py index d10e4d2ad..caaa69767 100644 --- a/keep/parser/parser.py +++ b/keep/parser/parser.py @@ -19,7 +19,7 @@ class Parser: def __init__(self): self.logger = logging.getLogger(__name__) - def _get_workflow_id(self, tenant_id, workflow: dict) -> str: + async def _get_workflow_id(self, tenant_id, workflow: dict) -> str: """Support both CLI and API workflows Args: @@ -38,7 +38,7 @@ def _get_workflow_id(self, tenant_id, workflow: dict) -> str: raise ValueError("Workflow dict must have an id") # get the workflow id from the database - workflow_id = get_workflow_id(tenant_id, workflow_name) + workflow_id = await get_workflow_id(tenant_id, workflow_name) # if the workflow id is not found, it means that the workflow is not stored in the db # for example when running from CLI # so for backward compatibility, we will use the workflow name as the id @@ -47,7 +47,7 @@ def _get_workflow_id(self, tenant_id, workflow: dict) -> str: workflow_id = workflow_name return workflow_id - def parse( + async def parse( self, tenant_id, parsed_workflow_yaml: dict, @@ -71,7 +71,7 @@ def parse( "workflows" ) or parsed_workflow_yaml.get("alerts") workflows = [ - self._parse_workflow( + await self._parse_workflow( tenant_id, workflow, providers_file, @@ -86,7 +86,7 @@ def parse( raw_workflow = parsed_workflow_yaml.get( "workflow" ) or parsed_workflow_yaml.get("alert") - workflow = self._parse_workflow( + workflow = await self._parse_workflow( tenant_id, raw_workflow, providers_file, @@ -97,7 +97,7 @@ def parse( workflows = [workflow] # else, if it stored in the db, it stored without the "workflow" key else: - workflow = self._parse_workflow( + workflow = await self._parse_workflow( tenant_id, parsed_workflow_yaml, providers_file, @@ -125,7 +125,7 @@ def _get_workflow_provider_types_from_steps_and_actions( ) return provider_types - def _parse_workflow( + async def _parse_workflow( self, tenant_id, workflow: dict, @@ -135,7 +135,7 @@ def _parse_workflow( workflow_actions: dict = None, ) -> Workflow: self.logger.debug("Parsing workflow") - workflow_id = self._get_workflow_id(tenant_id, workflow) + workflow_id = await self._get_workflow_id(tenant_id, workflow) context_manager = ContextManager( tenant_id=tenant_id, workflow_id=workflow_id, workflow=workflow ) diff --git a/keep/providers/base/base_provider.py b/keep/providers/base/base_provider.py index 44e2ebe20..01bb7cc86 100644 --- a/keep/providers/base/base_provider.py +++ b/keep/providers/base/base_provider.py @@ -3,9 +3,12 @@ """ import abc +import asyncio +from concurrent.futures import ThreadPoolExecutor import copy import datetime import hashlib +import inspect import itertools import json import logging @@ -66,6 +69,7 @@ class BaseProvider(metaclass=abc.ABCMeta): Literal["alert", "ticketing", "messaging", "data", "queue", "topology"] ] = [] WEBHOOK_INSTALLATION_REQUIRED = False # webhook installation is required for this provider, making it required in the UI + thread_executor_for_sync_methods = ThreadPoolExecutor() def __init__( self, @@ -145,15 +149,24 @@ def validate_scopes(self) -> dict[str, bool | str]: """ return {} - def notify(self, **kwargs): + async def notify(self, **kwargs): """ Output alert message. Args: **kwargs (dict): The provider context (with statement) """ - # trigger the provider - results = self._notify(**kwargs) + # Trigger the provider, allow async and non-async functions + if inspect.iscoroutinefunction(self._notify): + results = await self._notify(**kwargs) + else: + loop = asyncio.get_running_loop() + # Running in a thread executor to avoid blocking the event loop + results = await loop.run_in_executor( + self.__class__.thread_executor_for_sync_methods, + lambda: self._notify(**kwargs) + ) + self.logger.warning(f"Provider {self.provider_type} notify method is not async. This may cause performance issues.") self.results.append(results) # if the alert should be enriched, enrich it enrich_alert = kwargs.get("enrich_alert", []) @@ -299,9 +312,18 @@ def _query(self, **kwargs: dict): """ raise NotImplementedError("query() method not implemented") - def query(self, **kwargs: dict): - # just run the query - results = self._query(**kwargs) + async def query(self, **kwargs: dict): + # Run the query, it may be sync or async + if inspect.iscoroutinefunction(self._query): + results = await self._query(**kwargs) + else: + loop = asyncio.get_running_loop() + # Running in a thread executor to avoid blocking the event loop + results = await loop.run_in_executor( + self.__class__.thread_executor_for_sync_methods, + lambda: self._query(**kwargs) + ) + self.logger.warning(f"Provider {self.provider_type} _query method is not async. This may cause performance issues") self.results.append(results) # now add the type of the results to the global context if results and isinstance(results, list): diff --git a/keep/providers/clickhouse_http_provider/__init__.py b/keep/providers/clickhouse_http_provider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py new file mode 100644 index 000000000..c9ff13cb1 --- /dev/null +++ b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py @@ -0,0 +1,154 @@ +""" +Clickhouse is a class that provides a way to read data from Clickhouse. +""" + +import os +import asyncio +import pydantic +import dataclasses + +import clickhouse_connect + +from keep.contextmanager.contextmanager import ContextManager +from keep.providers.base.base_provider import BaseProvider +from keep.providers.models.provider_config import ProviderConfig, ProviderScope +from keep.validation.fields import NoSchemeUrl, UrlPort + + +@pydantic.dataclasses.dataclass +class ClickhouseHttpProviderAuthConfig: + username: str = dataclasses.field( + metadata={"required": True, "description": "Clickhouse username"} + ) + password: str = dataclasses.field( + metadata={ + "required": True, + "description": "Clickhouse password", + "sensitive": True, + } + ) + host: NoSchemeUrl = dataclasses.field( + metadata={ + "required": True, + "description": "Clickhouse hostname", + "validation": "no_scheme_url", + } + ) + port: UrlPort = dataclasses.field( + metadata={ + "required": True, + "description": "Clickhouse port", + "validation": "port", + } + ) + database: str | None = dataclasses.field( + metadata={"required": False, "description": "Clickhouse database name"}, + default=None, + ) + + +class ClickhouseHttpProvider(BaseProvider): + """Enrich alerts with data from Clickhouse.""" + + PROVIDER_DISPLAY_NAME = "Clickhouse Http" + PROVIDER_CATEGORY = ["Database"] + + PROVIDER_SCOPES = [ + ProviderScope( + name="connect_to_server", + description="The user can connect to the server", + mandatory=True, + alias="Connect to the server", + ) + ] + SHARED_CLIENT = {} # Caching the client to avoid creating a new one for each query + + def __init__( + self, context_manager: ContextManager, provider_id: str, config: ProviderConfig + ): + super().__init__(context_manager, provider_id, config) + self.client = None + + def dispose(self): + pass + + def validate_scopes(self): + """ + Validates that the user has the required scopes to use the provider. + """ + try: + client = asyncio.run(self.__generate_client()) + + tables = result = asyncio.run(client.query("SHOW TABLES")) + self.logger.info(f"Tables: {tables}") + + scopes = { + "connect_to_server": True, + } + except Exception as e: + self.logger.exception("Error validating scopes") + scopes = { + "connect_to_server": str(e), + } + return scopes + + async def __generate_client(self): + """ + Generates a Clickhouse client. + """ + if self.context_manager.tenant_id + self.provider_id in ClickhouseProvider.SHARED_CLIENT: + return ClickhouseProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] + + user = self.authentication_config.username + password = self.authentication_config.password + host = self.authentication_config.host + database = self.authentication_config.database + port = self.authentication_config.port + + client = await clickhouse_connect.get_async_client( + host=host, + port=port, + user=user, + password=password, + database=database, + ) + ClickhouseProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] = client + + return client + + def validate_config(self): + """ + Validates required configuration for Clickhouse's provider. + """ + self.authentication_config = ClickhouseProviderAuthConfig( + **self.config.authentication + ) + return True + + async def _query(self, query="", single_row=False, **kwargs: dict) -> list | tuple: + """ + Executes a query against the Clickhouse database. + Returns: + list | tuple: list of results or single result if single_row is True + """ + return await self._notify(query=query, single_row=single_row, **kwargs) + + async def _notify(self, query="", single_row=False, **kwargs: dict) -> list | tuple: + """ + Executes a query against the Clickhouse database. + Returns: + list | tuple: list of results or single result if single_row is True + """ + # return {'dt': datetime.datetime(2024, 12, 4, 6, 37, 22), 'customer_id': 99999999, 'total_spent': 19.850000381469727} + client = await self.__generate_client() + results = await client.query(query, **kwargs) + rows = results.result_rows + columns = results.column_names + + # Making the results more human readable and compatible with the format we had with sync library before. + results = [dict(zip(columns, row)) for row in rows] + + if single_row: + return results[0] + + return results \ No newline at end of file diff --git a/keep/step/step.py b/keep/step/step.py index 797ebb914..35c6b97de 100644 --- a/keep/step/step.py +++ b/keep/step/step.py @@ -51,12 +51,12 @@ def foreach(self): def name(self): return self.step_id - def run(self): + async def run(self): try: if self.config.get("foreach"): - did_action_run = self._run_foreach() + did_action_run = await self._run_foreach() else: - did_action_run = self._run_single() + did_action_run = await self._run_single() return did_action_run except Exception as e: self.logger.error( @@ -101,7 +101,7 @@ def _get_foreach_items(self) -> list | list[list]: return [] return len(foreach_items) == 1 and foreach_items[0] or zip(*foreach_items) - def _run_foreach(self): + async def _run_foreach(self): """Evaluate the action for each item, when using the `foreach` attribute (see foreach.md)""" # the item holds the value we are going to iterate over items = self._get_foreach_items() @@ -110,7 +110,7 @@ def _run_foreach(self): for item in items: self.context_manager.set_for_each_context(item) try: - did_action_run = self._run_single() + did_action_run = await self._run_single() except Exception as e: self.logger.error(f"Failed to run action with error {e}") continue @@ -120,7 +120,7 @@ def _run_foreach(self): any_action_run = True return any_action_run - def _run_single(self): + async def _run_single(self): # Initialize all conditions conditions = [] self.context_manager.set_step_vars(self.step_id, _vars=self.vars) @@ -252,11 +252,11 @@ def _run_single(self): ) try: if self.step_type == StepType.STEP: - step_output = self.provider.query( + step_output = await self.provider.query( **rendered_providers_parameters ) else: - step_output = self.provider.notify( + step_output = await self.provider.notify( **rendered_providers_parameters ) # exiting the loop as step/action execution was successful diff --git a/keep/workflowmanager/workflow.py b/keep/workflowmanager/workflow.py index 2d49c2e20..ed5f86375 100644 --- a/keep/workflowmanager/workflow.py +++ b/keep/workflowmanager/workflow.py @@ -53,12 +53,12 @@ def __init__( self.io_nandler = IOHandler(context_manager) self.logger = self.context_manager.get_logger() - def run_steps(self): + async def run_steps(self): self.logger.debug(f"Running steps for workflow {self.workflow_id}") for step in self.workflow_steps: try: self.logger.info("Running step %s", step.step_id) - step_ran = step.run() + step_ran = await step.run() if step_ran: self.logger.info("Step %s ran successfully", step.step_id) except StepError as e: @@ -66,10 +66,10 @@ def run_steps(self): raise self.logger.debug(f"Steps for workflow {self.workflow_id} ran successfully") - def run_action(self, action: Step): + async def run_action(self, action: Step): self.logger.info("Running action %s", action.name) try: - action_ran = action.run() + action_ran = await action.run() action_error = None if action_ran: self.logger.info("Action %s ran successfully", action.name) @@ -79,26 +79,26 @@ def run_action(self, action: Step): action_error = f"Failed to run action {action.name}: {str(e)}" return action_ran, action_error - def run_actions(self): + async def run_actions(self): self.logger.debug("Running actions") actions_firing = [] actions_errors = [] for action in self.workflow_actions: - action_status, action_error = self.run_action(action) + action_status, action_error = await self.run_action(action) if action_error: actions_firing.append(action_status) actions_errors.append(action_error) self.logger.debug("Actions run") return actions_firing, actions_errors - def run(self, workflow_execution_id): + async def run(self, workflow_execution_id): if self.workflow_disabled: self.logger.info(f"Skipping disabled workflow {self.workflow_id}") return self.logger.info(f"Running workflow {self.workflow_id}") self.context_manager.set_execution_context(workflow_execution_id) try: - self.run_steps() + await self.run_steps() except StepError as e: self.logger.error( f"Workflow {self.workflow_id} failed: {e}", @@ -111,8 +111,8 @@ def run(self, workflow_execution_id): self.logger.info(f"Finish to run workflow {self.workflow_id}") return actions_errors - def _handle_actions(self): + async def _handle_actions(self): self.logger.debug(f"Handling actions for workflow {self.workflow_id}") for action in self.workflow_actions: - action.run() + await action.run() self.logger.debug(f"Actions handled for workflow {self.workflow_id}") diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index 01d1f9f16..c711e5715 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -3,6 +3,7 @@ import re import typing import uuid +import asyncio from keep.api.core.config import config from keep.api.core.db import ( @@ -42,7 +43,7 @@ async def start(self): if self.started: self.logger.info("Workflow manager already started") return - await self.scheduler.start() + asyncio.create_task(self.scheduler.start()) self.started = True def stop(self): @@ -69,11 +70,11 @@ def _apply_filter(self, filter_val, value): return value == str(filter_val) return value == filter_val - def _get_workflow_from_store(self, tenant_id, workflow_model): + async def _get_workflow_from_store(self, tenant_id, workflow_model): try: # get the actual workflow that can be triggered self.logger.info("Getting workflow from store") - workflow = self.workflow_store.get_workflow(tenant_id, workflow_model.id) + workflow = await self.workflow_store.get_workflow(tenant_id, workflow_model.id) self.logger.info("Got workflow from store") return workflow except ProviderConfigurationException: @@ -109,7 +110,7 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str): f"tenant_id={workflow_model.tenant_id} - Workflow is disabled." ) continue - workflow = self._get_workflow_from_store(tenant_id, workflow_model) + workflow = asyncio.run(self._get_workflow_from_store(tenant_id, workflow_model)) if workflow is None: continue @@ -142,7 +143,7 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str): ) self.logger.info("Workflow added to run") - def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]): + async def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]): for event in events: self.logger.info("Getting all workflows") all_workflow_models = self.workflow_store.get_all_workflows(tenant_id) @@ -160,7 +161,7 @@ def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]): f"tenant_id={workflow_model.tenant_id} - Workflow is disabled." ) continue - workflow = self._get_workflow_from_store(tenant_id, workflow_model) + workflow = await self._get_workflow_from_store(tenant_id, workflow_model) if workflow is None: continue @@ -384,7 +385,7 @@ def _check_premium_providers(self, workflow: Workflow): f"Provider {provider} is a premium provider. You can self-host or contact us to get access to it." ) - def _run_workflow_on_failure( + async def _run_workflow_on_failure( self, workflow: Workflow, workflow_execution_id: str, error_message: str ): """ @@ -409,7 +410,7 @@ def _run_workflow_on_failure( f"Workflow {workflow.workflow_id} failed with errors: {error_message}" ) workflow.on_failure.provider_parameters = {"message": message} - workflow.on_failure.run() + await workflow.on_failure.run() self.logger.info( "Ran on_failure action for workflow", extra={ @@ -428,7 +429,7 @@ def _run_workflow_on_failure( }, ) - def _run_workflow( + async def _run_workflow( self, workflow: Workflow, workflow_execution_id: str, test_run=False ): self.logger.debug(f"Running workflow {workflow.workflow_id}") @@ -436,9 +437,9 @@ def _run_workflow( results = {} try: self._check_premium_providers(workflow) - errors = workflow.run(workflow_execution_id) + errors = await workflow.run(workflow_execution_id) if errors: - self._run_workflow_on_failure( + await self._run_workflow_on_failure( workflow, workflow_execution_id, ", ".join(errors) ) except Exception as e: @@ -446,7 +447,7 @@ def _run_workflow( f"Error running workflow {workflow.workflow_id}", extra={"exception": e, "workflow_execution_id": workflow_execution_id}, ) - self._run_workflow_on_failure(workflow, workflow_execution_id, str(e)) + await self._run_workflow_on_failure(workflow, workflow_execution_id, str(e)) raise finally: if not test_run: @@ -460,7 +461,7 @@ def _run_workflow( if test_run: results = self._get_workflow_results(workflow) else: - self._save_workflow_results(workflow, workflow_execution_id) + await self._save_workflow_results(workflow, workflow_execution_id) return [errors, results] @@ -485,7 +486,7 @@ def _get_workflow_results(workflow: Workflow): ) return workflow_results - def _save_workflow_results(self, workflow: Workflow, workflow_execution_id: str): + async def _save_workflow_results(self, workflow: Workflow, workflow_execution_id: str): """ Save the results of the workflow to the DB. @@ -502,7 +503,7 @@ def _save_workflow_results(self, workflow: Workflow, workflow_execution_id: str) {step.name: step.provider.results for step in workflow.workflow_steps} ) try: - save_workflow_results( + await save_workflow_results( tenant_id=workflow.context_manager.tenant_id, workflow_execution_id=workflow_execution_id, workflow_results=workflow_results, @@ -520,9 +521,9 @@ def _run_workflows_from_cli(self, workflows: typing.List[Workflow]): for workflow in workflows: try: random_workflow_id = str(uuid.uuid4()) - errors, _ = self._run_workflow( + errors, _ = asyncio.run(self._run_workflow( workflow, workflow_execution_id=random_workflow_id - ) + )) workflows_errors.append(errors) except Exception as e: self.logger.error( diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 2fe05e1ed..ab88a8e7b 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -1,3 +1,4 @@ +import asyncio import enum import hashlib import logging @@ -47,16 +48,16 @@ def __init__(self, workflow_manager): config("WORKFLOWS_INTERVAL_ENABLED", default="true") == "true" ) - async def start(self): + async def start(self, loop=None): self.logger.info("Starting workflows scheduler") # Shahar: fix for a bug in unit tests self._stop = False - thread = threading.Thread(target=self._start) - thread.start() - self.threads.append(thread) + if loop is None: + loop = asyncio.get_running_loop() + loop.create_task(self._start()) self.logger.info("Workflows scheduler started") - def _handle_interval_workflows(self): + async def _handle_interval_workflows(self): workflows = [] if not self.interval_enabled: @@ -65,7 +66,7 @@ def _handle_interval_workflows(self): try: # get all workflows that should run due to interval - workflows = get_workflows_that_should_run() + workflows = await get_workflows_that_should_run() except Exception: self.logger.exception("Error getting workflows that should run") pass @@ -76,7 +77,7 @@ def _handle_interval_workflows(self): tenant_id = workflow.get("tenant_id") workflow_id = workflow.get("workflow_id") try: - workflow = self.workflow_store.get_workflow(tenant_id, workflow_id) + workflow = await self.workflow_store.get_workflow(tenant_id, workflow_id) except ProviderConfigurationException: self.logger.exception( "Provider configuration is invalid", @@ -86,7 +87,7 @@ def _handle_interval_workflows(self): "tenant_id": tenant_id, }, ) - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -96,7 +97,7 @@ def _handle_interval_workflows(self): continue except Exception as e: self.logger.error(f"Error getting workflow: {e}") - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -104,14 +105,9 @@ def _handle_interval_workflows(self): error=f"Error getting workflow: {e}", ) continue - thread = threading.Thread( - target=self._run_workflow, - args=[tenant_id, workflow_id, workflow, workflow_execution_id], - ) - thread.start() - self.threads.append(thread) + await asyncio.create_task(self._run_workflow(tenant_id, workflow_id, workflow, workflow_execution_id)) - def _run_workflow( + async def _run_workflow( self, tenant_id, workflow_id, @@ -122,7 +118,7 @@ def _run_workflow( if READ_ONLY_MODE: # This is because sometimes workflows takes 0 seconds and the executions chart is not updated properly. self.logger.debug("Sleeping for 3 seconds in favor of read only mode") - time.sleep(3) + await asyncio.sleep(3) self.logger.info(f"Running workflow {workflow.workflow_id}...") try: @@ -133,12 +129,12 @@ def _run_workflow( # set the incident context, e.g. the incident that triggered the workflow workflow.context_manager.set_incident_context(event_context) - errors, _ = self.workflow_manager._run_workflow( + errors, _ = await self.workflow_manager._run_workflow( workflow, workflow_execution_id ) except Exception as e: self.logger.exception(f"Failed to run workflow {workflow.workflow_id}...") - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -149,7 +145,7 @@ def _run_workflow( if any(errors): self.logger.info(msg=f"Workflow {workflow.workflow_id} ran with errors") - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -157,7 +153,7 @@ def _run_workflow( error="\n".join(str(e) for e in errors), ) else: - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -165,8 +161,9 @@ def _run_workflow( error=None, ) self.logger.info(f"Workflow {workflow.workflow_id} ran") + return True - def handle_workflow_test(self, workflow, tenant_id, triggered_by_user): + async def handle_workflow_test(self, workflow, tenant_id, triggered_by_user): workflow_execution_id = self._get_unique_execution_number() @@ -227,7 +224,7 @@ def run_workflow_wrapper( "results": results, } - def handle_manual_event_workflow( + async def handle_manual_event_workflow( self, workflow_id, tenant_id, triggered_by_user, event: [AlertDto | IncidentDto] ): self.logger.info(f"Running manual event workflow {workflow_id}...") @@ -244,7 +241,7 @@ def handle_manual_event_workflow( event_type = "alert" fingerprint = event.fingerprint - workflow_execution_id = create_workflow_execution( + workflow_execution_id = await create_workflow_execution( workflow_id=workflow_id, tenant_id=tenant_id, triggered_by=f"manually by {triggered_by_user}", @@ -300,7 +297,7 @@ def _get_unique_execution_number(self, fingerprint=None): WorkflowScheduler.MAX_SIZE_SIGNED_INT + 1 ) - def _handle_event_workflows(self): + async def _handle_event_workflows(self): # TODO - event workflows should be in DB too, to avoid any state problems. # take out all items from the workflows to run and run them, also, clean the self.workflows_to_run list @@ -324,13 +321,13 @@ def _handle_event_workflows(self): if not workflow: self.logger.info("Loading workflow") try: - workflow = self.workflow_store.get_workflow( + workflow = await self.workflow_store.get_workflow( workflow_id=workflow_id, tenant_id=tenant_id ) # In case the provider are not configured properly except ProviderConfigurationException as e: self.logger.error(f"Error getting workflow: {e}") - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -340,7 +337,7 @@ def _handle_event_workflows(self): continue except Exception as e: self.logger.error(f"Error getting workflow: {e}") - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -384,7 +381,7 @@ def _handle_event_workflows(self): workflow_execution_number = self._get_unique_execution_number( fingerprint ) - workflow_execution_id = create_workflow_execution( + workflow_execution_id = await create_workflow_execution( workflow_id=workflow_id, tenant_id=tenant_id, triggered_by=triggered_by, @@ -431,7 +428,7 @@ def _handle_event_workflows(self): "tenant_id": tenant_id, }, ) - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -490,7 +487,7 @@ def _handle_event_workflows(self): "tenant_id": tenant_id, }, ) - self._finish_workflow_execution( + await self._finish_workflow_execution( tenant_id=tenant_id, workflow_id=workflow_id, workflow_execution_id=workflow_execution_id, @@ -498,43 +495,31 @@ def _handle_event_workflows(self): error=f"Error getting alert by id: {e}", ) continue - # Last, run the workflow - thread = threading.Thread( - target=self._run_workflow, - args=[tenant_id, workflow_id, workflow, workflow_execution_id, event], - ) - thread.start() - self.threads.append(thread) + asyncio.create_task(self._run_workflow(tenant_id, workflow_id, workflow, workflow_execution_id, event)) - def _start(self): + async def _start(self): self.logger.info("Starting workflows scheduler") while not self._stop: # get all workflows that should run now self.logger.debug("Getting workflows that should run...") try: - self._handle_interval_workflows() - self._handle_event_workflows() + await self._handle_interval_workflows() + await self._handle_event_workflows() except Exception: # This is the "mainloop" of the scheduler, we don't want to crash it # But any exception here should be investigated self.logger.exception("Error getting workflows that should run") pass self.logger.debug("Sleeping until next iteration") - time.sleep(1) + await asyncio.sleep(1) self.logger.info("Workflows scheduler stopped") - def run_workflows(self, workflows: typing.List[Workflow]): + async def run_workflows(self, workflows: typing.List[Workflow]): for workflow in workflows: - thread = threading.Thread( - target=self._run_workflows_with_interval, - args=[workflow], - daemon=True, - ) - thread.start() - self.threads.append(thread) + asyncio.create_task(self._run_workflows_with_interval(workflow)) # as long as the stop flag is not set, sleep while not self._stop: - time.sleep(1) + await asyncio.sleep(1) def stop(self): self.logger.info("Stopping scheduled workflows") @@ -544,7 +529,7 @@ def stop(self): thread.join() self.logger.info("Scheduled workflows stopped") - def _run_workflows_with_interval( + async def _run_workflows_with_interval( self, workflow: Workflow, ): @@ -558,7 +543,7 @@ def _run_workflows_with_interval( while True and not self._stop: self.logger.info(f"Running workflow {workflow.workflow_id}...") try: - self.workflow_manager._run_workflow(workflow, uuid.uuid4()) + await self.workflow_manager._run_workflow(workflow, uuid.uuid4()) except Exception: self.logger.exception( f"Failed to run workflow {workflow.workflow_id}..." @@ -568,12 +553,12 @@ def _run_workflows_with_interval( self.logger.info( f"Sleeping for {workflow.workflow_interval} seconds..." ) - time.sleep(workflow.workflow_interval) + await asyncio.sleep(workflow.workflow_interval) else: self.logger.info("Workflow will not run again") break - def _finish_workflow_execution( + async def _finish_workflow_execution( self, tenant_id: str, workflow_id: str, @@ -582,7 +567,7 @@ def _finish_workflow_execution( error=None, ): # mark the workflow execution as finished in the db - finish_workflow_execution_db( + await finish_workflow_execution_db( tenant_id=tenant_id, workflow_id=workflow_id, execution_id=workflow_execution_id, @@ -592,7 +577,7 @@ def _finish_workflow_execution( if KEEP_EMAILS_ENABLED: # get the previous workflow execution id - previous_execution = get_previous_execution_id( + previous_execution = await get_previous_execution_id( tenant_id, workflow_id, workflow_execution_id ) # if error, send an email @@ -601,7 +586,7 @@ def _finish_workflow_execution( is None # this means this is the first execution, for example or previous_execution.status != WorkflowStatus.ERROR.value ): - workflow = get_workflow_db(tenant_id=tenant_id, workflow_id=workflow_id) + workflow = await get_workflow_db(tenant_id=tenant_id, workflow_id=workflow_id) try: keep_platform_url = config( "KEEP_PLATFORM_URL", default="https://platform.keephq.dev" diff --git a/keep/workflowmanager/workflowstore.py b/keep/workflowmanager/workflowstore.py index 109a4d2b2..e75884a1a 100644 --- a/keep/workflowmanager/workflowstore.py +++ b/keep/workflowmanager/workflowstore.py @@ -98,21 +98,21 @@ def _parse_workflow_to_dict(self, workflow_path: str) -> dict: with open(workflow_path, "r") as file: return self._read_workflow_from_stream(file) - def get_raw_workflow(self, tenant_id: str, workflow_id: str) -> str: - raw_workflow = get_raw_workflow(tenant_id, workflow_id) + async def get_raw_workflow(self, tenant_id: str, workflow_id: str) -> str: + raw_workflow = await get_raw_workflow(tenant_id, workflow_id) workflow_yaml = yaml.safe_load(raw_workflow) valid_workflow_yaml = {"workflow": workflow_yaml} return yaml.dump(valid_workflow_yaml, width=99999) - def get_workflow(self, tenant_id: str, workflow_id: str) -> Workflow: - workflow = get_raw_workflow(tenant_id, workflow_id) + async def get_workflow(self, tenant_id: str, workflow_id: str) -> Workflow: + workflow = await get_raw_workflow(tenant_id, workflow_id) if not workflow: raise HTTPException( status_code=404, detail=f"Workflow {workflow_id} not found", ) workflow_yaml = yaml.safe_load(workflow) - workflow = self.parser.parse(tenant_id, workflow_yaml) + workflow = await self.parser.parse(tenant_id, workflow_yaml) if len(workflow) > 1: raise HTTPException( status_code=500, @@ -126,9 +126,9 @@ def get_workflow(self, tenant_id: str, workflow_id: str) -> Workflow: detail=f"Workflow {workflow_id} not found", ) - def get_workflow_from_dict(self, tenant_id: str, workflow: dict) -> Workflow: + async def get_workflow_from_dict(self, tenant_id: str, workflow: dict) -> Workflow: logging.info("Parsing workflow from dict", extra={"workflow": workflow}) - workflow = self.parser.parse(tenant_id, workflow) + workflow = await self.parser.parse(tenant_id, workflow) if workflow: return workflow[0] else: @@ -158,7 +158,7 @@ def get_all_workflows_yamls(self, tenant_id: str) -> list[str]: workflow_yamls = get_all_workflows_yamls(tenant_id) return workflow_yamls - def get_workflows_from_path( + async def get_workflows_from_path( self, tenant_id, workflow_path: str | tuple[str], @@ -181,25 +181,25 @@ def get_workflows_from_path( for workflow_url in workflow_path: workflow_yaml = self._parse_workflow_to_dict(workflow_url) workflows.extend( - self.parser.parse( + await self.parser.parse( tenant_id, workflow_yaml, providers_file, actions_file ) ) elif os.path.isdir(workflow_path): workflows.extend( - self._get_workflows_from_directory( + await self._get_workflows_from_directory( tenant_id, workflow_path, providers_file, actions_file ) ) else: workflow_yaml = self._parse_workflow_to_dict(workflow_path) - workflows = self.parser.parse( + workflows = await self.parser.parse( tenant_id, workflow_yaml, providers_file, actions_file ) return workflows - def _get_workflows_from_directory( + async def _get_workflows_from_directory( self, tenant_id, workflows_dir: str, From 317c9d3b69053b269e455f0e16519bfe11136c16 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:29:04 +0100 Subject: [PATCH 02/75] Fix --- keep/workflowmanager/workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keep/workflowmanager/workflow.py b/keep/workflowmanager/workflow.py index ed5f86375..5c238511c 100644 --- a/keep/workflowmanager/workflow.py +++ b/keep/workflowmanager/workflow.py @@ -107,7 +107,7 @@ async def run(self, workflow_execution_id): }, ) raise - actions_firing, actions_errors = self.run_actions() + actions_firing, actions_errors = await self.run_actions() self.logger.info(f"Finish to run workflow {self.workflow_id}") return actions_errors From c363c0825d8a2c7a57ce4f00bf484c6f95a6340b Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:30:35 +0100 Subject: [PATCH 03/75] Fix imports --- keep/api/routes/workflows.py | 2 -- .../clickhouse_http_provider/clickhouse_http_provider.py | 9 ++++----- keep/workflowmanager/workflowscheduler.py | 1 - 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py index 863bbcf10..34f409d1d 100644 --- a/keep/api/routes/workflows.py +++ b/keep/api/routes/workflows.py @@ -20,7 +20,6 @@ from sqlmodel import Session from keep.api.core.db import ( - get_alert_by_event_id, get_installed_providers, get_last_workflow_workflow_to_alert_executions, get_session, @@ -36,7 +35,6 @@ WorkflowExecutionLogsDTO, WorkflowToAlertExecutionDTO, ) -from keep.api.utils.enrichment_helpers import convert_db_alerts_to_dto_alerts from keep.api.utils.pagination import WorkflowExecutionsPaginatedResultsDto from keep.identitymanager.authenticatedentity import AuthenticatedEntity from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory diff --git a/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py index c9ff13cb1..6eca33cbc 100644 --- a/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py +++ b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py @@ -2,7 +2,6 @@ Clickhouse is a class that provides a way to read data from Clickhouse. """ -import os import asyncio import pydantic import dataclasses @@ -96,8 +95,8 @@ async def __generate_client(self): """ Generates a Clickhouse client. """ - if self.context_manager.tenant_id + self.provider_id in ClickhouseProvider.SHARED_CLIENT: - return ClickhouseProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] + if self.context_manager.tenant_id + self.provider_id in ClickhouseHttpProvider.SHARED_CLIENT: + return ClickhouseHttpProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] user = self.authentication_config.username password = self.authentication_config.password @@ -112,7 +111,7 @@ async def __generate_client(self): password=password, database=database, ) - ClickhouseProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] = client + ClickhouseHttpProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] = client return client @@ -120,7 +119,7 @@ def validate_config(self): """ Validates required configuration for Clickhouse's provider. """ - self.authentication_config = ClickhouseProviderAuthConfig( + self.authentication_config = ClickhouseHttpProviderAuthConfig( **self.config.authentication ) return True diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index ab88a8e7b..42daed3e7 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -4,7 +4,6 @@ import logging import queue import threading -import time import typing import uuid from threading import Lock From 1068568079c63c83d55a21936ba669a1d9568e1b Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:32:44 +0100 Subject: [PATCH 04/75] Fix --- .../clickhouse_http_provider/clickhouse_http_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py index 6eca33cbc..08e0a4daf 100644 --- a/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py +++ b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py @@ -78,7 +78,7 @@ def validate_scopes(self): try: client = asyncio.run(self.__generate_client()) - tables = result = asyncio.run(client.query("SHOW TABLES")) + tables = asyncio.run(client.query("SHOW TABLES")) self.logger.info(f"Tables: {tables}") scopes = { From 8a251b5c722c222b33eacc3f1a754d8663dfe680 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:46:11 +0100 Subject: [PATCH 05/75] Async connection string --- keep/api/core/db_utils.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/keep/api/core/db_utils.py b/keep/api/core/db_utils.py index cbc6c61d9..739fe7113 100644 --- a/keep/api/core/db_utils.py +++ b/keep/api/core/db_utils.py @@ -125,6 +125,27 @@ def dumps(_json) -> str: return json.dumps(_json, default=str) +def asynchronize_connection_string(connection_string): + """ + We want to make sure keep is able to work after an update to async. + We also may assume some customers hardcoded async drivers to the connection strings + so we substitute sync drivers to async on the fly. + """ + if connection_string.startswith('sqlite:'): + connection_string = connection_string.replace('sqlite:', 'sqlite+aiosqlite:', 1) + logging.warning(f"DB connection string updated to: {connection_string}") + + if connection_string.startswith('postgresql:'): + connection_string = connection_string.replace('postgresql+psycopg2:', 'postgresql+psycopg:', 1) + logging.warning(f"DB connection string updated to: {connection_string}") + + if connection_string.startswith('mysql:'): + connection_string = connection_string.replace('postgresql+psycopg2:', 'postgresql+psycopg:', 1) + return connection_string.replace('mysql:', 'mysql+asyncmy:', 1) + + return connection_string + + def create_db_engine(_async=False): """ Creates a database engine based on the environment variables. @@ -150,7 +171,7 @@ def create_db_engine(_async=False): try: logger.info(f"Creating a connection pool with size {DB_POOL_SIZE}") engine = creator_method( - DB_CONNECTION_STRING, + asynchronize_connection_string(DB_CONNECTION_STRING), pool_size=DB_POOL_SIZE, max_overflow=DB_MAX_OVERFLOW, json_serializer=dumps, @@ -160,11 +181,11 @@ def create_db_engine(_async=False): # SQLite does not support pool_size except TypeError: engine = creator_method( - DB_CONNECTION_STRING, json_serializer=dumps, echo=DB_ECHO + asynchronize_connection_string(DB_CONNECTION_STRING), json_serializer=dumps, echo=DB_ECHO ) else: engine = creator_method( - "sqlite:///./keep.db", + "sqlite+aiosqlite:///./keep.db", connect_args={"check_same_thread": False}, echo=DB_ECHO, json_serializer=dumps, From fb56fddbd8240ff0772effa3f3458cc3736f3e2d Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:54:45 +0100 Subject: [PATCH 06/75] aiosqlite --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index a02ab24d3..ee967f4bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,6 +89,7 @@ psycopg = "^3.2.3" prometheus-client = "^0.21.1" psycopg2-binary = "^2.9.10" +aiosqlite = "^0.20.0" [tool.poetry.group.dev.dependencies] pre-commit = "^3.0.4" pre-commit-hooks = "^4.4.0" From 779b69b11688e1792872436274a9ed1cbdc82b7f Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 20:59:51 +0100 Subject: [PATCH 07/75] poetry.lock --- poetry.lock | 48 +++++++++++++++++++++++++++++++++--------------- 1 file changed, 33 insertions(+), 15 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6ef1eb261..4dd6dede1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -147,6 +147,24 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "aiosqlite" +version = "0.20.0" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, + {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, +] + +[package.dependencies] +typing_extensions = ">=4.0" + +[package.extras] +dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] + [[package]] name = "alembic" version = "1.14.0" @@ -546,17 +564,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.35.84" +version = "1.35.86" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.84-py3-none-any.whl", hash = "sha256:c94fc8023caf952f8740a48fc400521bba167f883cfa547d985c05fda7223f7a"}, - {file = "boto3-1.35.84.tar.gz", hash = "sha256:9f9bf72d92f7fdd546b974ffa45fa6715b9af7f5c00463e9d0f6ef9c95efe0c2"}, + {file = "boto3-1.35.86-py3-none-any.whl", hash = "sha256:ed59fb4883da167464a5dfbc96e76d571db75e1a7a27d8e7b790c3008b02fcc7"}, + {file = "boto3-1.35.86.tar.gz", hash = "sha256:d61476fdd5a5388503b72c897083310d2329ce088593c4332b571a860be5d155"}, ] [package.dependencies] -botocore = ">=1.35.84,<1.36.0" +botocore = ">=1.35.86,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -565,13 +583,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.84" +version = "1.35.86" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.84-py3-none-any.whl", hash = "sha256:b4dc2ac7f54ba959429e1debbd6c7c2fb2349baa1cd63803f0682f0773dbd077"}, - {file = "botocore-1.35.84.tar.gz", hash = "sha256:f86754882e04683e2e99a6a23377d0dd7f1fc2b2242844b2381dbe4dcd639301"}, + {file = "botocore-1.35.86-py3-none-any.whl", hash = "sha256:77cb4b445e4f424f956c68c688bd3ad527f4d214d51d67ffc8e245f4476d7de0"}, + {file = "botocore-1.35.86.tar.gz", hash = "sha256:951e944eb30284b4593d4da98f70f7b5292ea237e4de0c5a2852946a549b8347"}, ] [package.dependencies] @@ -864,13 +882,13 @@ files = [ [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -5074,13 +5092,13 @@ python-socketio = {version = ">=5.0.0", extras = ["client"]} [[package]] name = "urllib3" -version = "2.2.3" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] @@ -5408,4 +5426,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "8e764f85116828d2a0f772cc96ae127707ac1ca6ee061b42823f99d79975dc73" +content-hash = "937a70cf9d203a461ea13b7c7cd974e7ad16a13ed3929af26f7ff16cdf44d03c" From a2b8de2ff6a88e6b56f802af1f8dc3eff1ca4467 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 21:17:15 +0100 Subject: [PATCH 08/75] Fix docs --- docs/deployment/kubernetes/overview.mdx | 2 +- docs/incidents/overview.mdx | 4 -- docs/platform/alerts.mdx | 54 -------------------- docs/platform/overview.mdx | 15 ------ docs/platform/settings.mdx | 41 --------------- docs/platform/support.mdx | 16 ------ keep/api/core/db_utils.py | 30 +++++++---- poetry.lock | 67 ++++++++++++++++++++++++- pyproject.toml | 1 + 9 files changed, 87 insertions(+), 143 deletions(-) delete mode 100644 docs/platform/alerts.mdx delete mode 100644 docs/platform/overview.mdx delete mode 100644 docs/platform/settings.mdx delete mode 100644 docs/platform/support.mdx diff --git a/docs/deployment/kubernetes/overview.mdx b/docs/deployment/kubernetes/overview.mdx index a3d889f07..7f75cffb3 100644 --- a/docs/deployment/kubernetes/overview.mdx +++ b/docs/deployment/kubernetes/overview.mdx @@ -14,6 +14,6 @@ We maintain an opinionated, batteries-included Helm chart, but you can customize ## Next steps - Install Keep on [Kubernetes](/deployment/kubernetes/installation). - Keep's [Helm Chart](https://github.com/keephq/helm-charts). -- Keep with [Kubernetes Secret Manager](/deployment/secret-manager#kubernetes-secret-manager) +- Keep with [Kubernetes Secret Manager](/deployment/configuration#secrets-management) - Deep dive to Keep's kubernetes [Architecture](/deployment/kubernetes/architecture). - Install Keep on [OpenShift](/deployment/kubernetes/openshift). diff --git a/docs/incidents/overview.mdx b/docs/incidents/overview.mdx index 0c9c9a38f..8d4687e38 100644 --- a/docs/incidents/overview.mdx +++ b/docs/incidents/overview.mdx @@ -21,10 +21,6 @@ A brief overview of the incident, optionally enhanced with AI-generated summarie ### (4) Link Similar Incidents Connects related incidents for better visibility into recurring or interconnected issues. - - - - ### (5) Involved Services Lists the services affected by the incident, allowing teams to understand the scope of the impact. diff --git a/docs/platform/alerts.mdx b/docs/platform/alerts.mdx deleted file mode 100644 index 0b729e9dd..000000000 --- a/docs/platform/alerts.mdx +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: "Alerts" -sidebarTitle: Alerts ---- - -## Overview -You can manage Alerts programmatically using the Alerts API. -The alerts page let you manage your alerts in a single pane of glass. - - -## View your alerts - -By connecting Providers, you get a single pane of glass for your alerts: - - - - -## Pushed alerts - - - - -See all of the alerts that were pushed into Keep. - -## Pulled alerts - - - - -See all of the alerts that were pulled by Keep. - - -## Alert history -To see an alert history, just click on the history button: - - - - - -## Go to the original alert -You can see your alert in the origin tool by clicking on "Open Alert": - - - diff --git a/docs/platform/overview.mdx b/docs/platform/overview.mdx deleted file mode 100644 index bb3f3b809..000000000 --- a/docs/platform/overview.mdx +++ /dev/null @@ -1,15 +0,0 @@ ---- -title: "Overview" -sidebarTitle: Overview ---- -Keep is fully open source. If you want to start Keep on your local environment, see the deployment section. -Keep is API first. Everything you do on the UI can be done via API. - -The platform is accessible on https://platform.keephq.dev and let you start the journey of improving your alerts. - -The platform is currently built on top of: - -1. [Providers](/providers/overview) - connect your stack to Keep. -2. [Alerts](/platform/alerts) - single pane of glass for your alerts. -3. [Workflows](/workflows/overview) - create automations on top of your alerts (or regardless). -4. [Settings](/platform/settings) - the settings page (add users, etc). diff --git a/docs/platform/settings.mdx b/docs/platform/settings.mdx deleted file mode 100644 index 6f472a977..000000000 --- a/docs/platform/settings.mdx +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: "Settings" -sidebarTitle: Settings ---- - -# Overview -Setup and configure Keep. - -## Users -Add or remove users from your tenant. - - - - - -## Webhook -View your tenant webhook settings. - - - - - -## SMTP -Configure your SMTP server to send emails. - - - - - -### Get an API Key - - - diff --git a/docs/platform/support.mdx b/docs/platform/support.mdx deleted file mode 100644 index b220d117f..000000000 --- a/docs/platform/support.mdx +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: "Support" -sidebarTitle: Support ---- - -## Overview -You can use the following methods to ask for support/help with anything related with Keep: - - - - You can use the [Keep Slack community](https://slack.keephq.dev) to get support. - - - You can use support@keephq.dev to send inquiries. - - diff --git a/keep/api/core/db_utils.py b/keep/api/core/db_utils.py index 739fe7113..131ecb6ec 100644 --- a/keep/api/core/db_utils.py +++ b/keep/api/core/db_utils.py @@ -131,17 +131,19 @@ def asynchronize_connection_string(connection_string): We also may assume some customers hardcoded async drivers to the connection strings so we substitute sync drivers to async on the fly. """ + if type(connection_string) != str: + return connection_string + if connection_string.startswith('sqlite:'): connection_string = connection_string.replace('sqlite:', 'sqlite+aiosqlite:', 1) logging.warning(f"DB connection string updated to: {connection_string}") - if connection_string.startswith('postgresql:'): + if connection_string.startswith('postgresql+psycopg2:'): connection_string = connection_string.replace('postgresql+psycopg2:', 'postgresql+psycopg:', 1) logging.warning(f"DB connection string updated to: {connection_string}") - if connection_string.startswith('mysql:'): - connection_string = connection_string.replace('postgresql+psycopg2:', 'postgresql+psycopg:', 1) - return connection_string.replace('mysql:', 'mysql+asyncmy:', 1) + if connection_string.startswith('mysql+pymysql:'): + connection_string = connection_string.replace('mysql+pymysql:', 'mysql+asyncmy:', 1) return connection_string @@ -150,28 +152,34 @@ def create_db_engine(_async=False): """ Creates a database engine based on the environment variables. """ - creator_method = create_engine if not _async else create_async_engine + if _async: + creator_method = create_async_engine + db_connecton_string = asynchronize_connection_string(DB_CONNECTION_STRING) + else: + creator_method = create_engine + db_connecton_string = DB_CONNECTION_STRING + if RUNNING_IN_CLOUD_RUN and not KEEP_FORCE_CONNECTION_STRING: engine = creator_method( - "mysql+pymysql://", + "mysql+asyncmy://", creator=__get_conn, echo=DB_ECHO, json_serializer=dumps, pool_size=DB_POOL_SIZE, max_overflow=DB_MAX_OVERFLOW, ) - elif DB_CONNECTION_STRING == "impersonate": + elif db_connecton_string == "impersonate": engine = creator_method( - "mysql+pymysql://", + "mysql+asyncmy://", creator=__get_conn_impersonate, echo=DB_ECHO, json_serializer=dumps, ) - elif DB_CONNECTION_STRING: + elif db_connecton_string: try: logger.info(f"Creating a connection pool with size {DB_POOL_SIZE}") engine = creator_method( - asynchronize_connection_string(DB_CONNECTION_STRING), + db_connecton_string, pool_size=DB_POOL_SIZE, max_overflow=DB_MAX_OVERFLOW, json_serializer=dumps, @@ -181,7 +189,7 @@ def create_db_engine(_async=False): # SQLite does not support pool_size except TypeError: engine = creator_method( - asynchronize_connection_string(DB_CONNECTION_STRING), json_serializer=dumps, echo=DB_ECHO + db_connecton_string, json_serializer=dumps, echo=DB_ECHO ) else: engine = creator_method( diff --git a/poetry.lock b/poetry.lock index 4dd6dede1..4b0c38eb5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -315,6 +315,71 @@ files = [ {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"}, ] +[[package]] +name = "asyncmy" +version = "0.2.10" +description = "A fast asyncio MySQL driver" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "asyncmy-0.2.10-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:c2237c8756b8f374099bd320c53b16f7ec0cee8258f00d72eed5a2cd3d251066"}, + {file = "asyncmy-0.2.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:6e98d4fbf7ea0d99dfecb24968c9c350b019397ba1af9f181d51bb0f6f81919b"}, + {file = "asyncmy-0.2.10-cp310-cp310-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:b1b1ee03556c7eda6422afc3aca132982a84706f8abf30f880d642f50670c7ed"}, + {file = "asyncmy-0.2.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e2b97672ea3f0b335c0ffd3da1a5727b530f82f5032cd87e86c3aa3ac6df7f3"}, + {file = "asyncmy-0.2.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c6471ce1f9ae1e6f0d55adfb57c49d0bcf5753a253cccbd33799ddb402fe7da2"}, + {file = "asyncmy-0.2.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10e2a10fe44a2b216a1ae58fbdafa3fed661a625ec3c030c560c26f6ab618522"}, + {file = "asyncmy-0.2.10-cp310-cp310-win32.whl", hash = "sha256:a791ab117787eb075bc37ed02caa7f3e30cca10f1b09ec7eeb51d733df1d49fc"}, + {file = "asyncmy-0.2.10-cp310-cp310-win_amd64.whl", hash = "sha256:bd16fdc0964a4a1a19aec9797ca631c3ff2530013fdcd27225fc2e48af592804"}, + {file = "asyncmy-0.2.10-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:7af0f1f31f800a8789620c195e92f36cce4def68ee70d625534544d43044ed2a"}, + {file = "asyncmy-0.2.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:800116ab85dc53b24f484fb644fefffac56db7367a31e7d62f4097d495105a2c"}, + {file = "asyncmy-0.2.10-cp311-cp311-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:39525e9d7e557b83db268ed14b149a13530e0d09a536943dba561a8a1c94cc07"}, + {file = "asyncmy-0.2.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76e199d6b57918999efc702d2dbb182cb7ba8c604cdfc912517955219b16eaea"}, + {file = "asyncmy-0.2.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ca8fdd7dbbf2d9b4c2d3a5fac42b058707d6a483b71fded29051b8ae198a250"}, + {file = "asyncmy-0.2.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0df23db54e38602c803dacf1bbc1dcc4237a87223e659681f00d1a319a4f3826"}, + {file = "asyncmy-0.2.10-cp311-cp311-win32.whl", hash = "sha256:a16633032be020b931acfd7cd1862c7dad42a96ea0b9b28786f2ec48e0a86757"}, + {file = "asyncmy-0.2.10-cp311-cp311-win_amd64.whl", hash = "sha256:cca06212575922216b89218abd86a75f8f7375fc9c28159ea469f860785cdbc7"}, + {file = "asyncmy-0.2.10-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:42295530c5f36784031f7fa42235ef8dd93a75d9b66904de087e68ff704b4f03"}, + {file = "asyncmy-0.2.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:641a853ffcec762905cbeceeb623839c9149b854d5c3716eb9a22c2b505802af"}, + {file = "asyncmy-0.2.10-cp312-cp312-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:c554874223dd36b1cfc15e2cd0090792ea3832798e8fe9e9d167557e9cf31b4d"}, + {file = "asyncmy-0.2.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd16e84391dde8edb40c57d7db634706cbbafb75e6a01dc8b68a63f8dd9e44ca"}, + {file = "asyncmy-0.2.10-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9f6b44c4bf4bb69a2a1d9d26dee302473099105ba95283b479458c448943ed3c"}, + {file = "asyncmy-0.2.10-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:16d398b1aad0550c6fe1655b6758455e3554125af8aaf1f5abdc1546078c7257"}, + {file = "asyncmy-0.2.10-cp312-cp312-win32.whl", hash = "sha256:59d2639dcc23939ae82b93b40a683c15a091460a3f77fa6aef1854c0a0af99cc"}, + {file = "asyncmy-0.2.10-cp312-cp312-win_amd64.whl", hash = "sha256:4c6674073be97ffb7ac7f909e803008b23e50281131fef4e30b7b2162141a574"}, + {file = "asyncmy-0.2.10-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:85bc4522d8b632cd3327001a00cb24416883fc3905857737b99aa00bc0703fe1"}, + {file = "asyncmy-0.2.10-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:c93768dde803c7c118e6ac1893f98252e48fecad7c20bb7e27d4bdf3d130a044"}, + {file = "asyncmy-0.2.10-cp38-cp38-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:93b6d7db19a093abdeceb454826ff752ce1917288635d5d63519068ef5b2f446"}, + {file = "asyncmy-0.2.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acecd4bbb513a67a94097fd499dac854546e07d2ff63c7fb5f4d2c077e4bdf91"}, + {file = "asyncmy-0.2.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1b4b346c02fca1d160005d4921753bb00ed03422f0c6ec90936c43aad96b7d52"}, + {file = "asyncmy-0.2.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8d393570e1c96ca200075797cc4f80849fc0ea960a45c6035855b1d392f33768"}, + {file = "asyncmy-0.2.10-cp38-cp38-win32.whl", hash = "sha256:c8ee5282af5f38b4dc3ae94a3485688bd6c0d3509ba37226dbaa187f1708e32c"}, + {file = "asyncmy-0.2.10-cp38-cp38-win_amd64.whl", hash = "sha256:10b3dfb119d7a9cb3aaae355c0981e60934f57297ea560bfdb280c5d85f77a9d"}, + {file = "asyncmy-0.2.10-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:244289bd1bea84384866bde50b09fe5b24856640e30a04073eacb71987b7b6ad"}, + {file = "asyncmy-0.2.10-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:6c9d024b160b9f869a21e62c4ef34a7b7a4b5a886ae03019d4182621ea804d2c"}, + {file = "asyncmy-0.2.10-cp39-cp39-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:b57594eea942224626203503f24fa88a47eaab3f13c9f24435091ea910f4b966"}, + {file = "asyncmy-0.2.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346192941470ac2d315f97afa14c0131ff846c911da14861baf8a1f8ed541664"}, + {file = "asyncmy-0.2.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:957c2b48c5228e5f91fdf389daf38261a7b8989ad0eb0d1ba4e5680ef2a4a078"}, + {file = "asyncmy-0.2.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:472989d7bfa405c108a7f3c408bbed52306504fb3aa28963d833cb7eeaafece0"}, + {file = "asyncmy-0.2.10-cp39-cp39-win32.whl", hash = "sha256:714b0fdadd72031e972de2bbbd14e35a19d5a7e001594f0c8a69f92f0d05acc9"}, + {file = "asyncmy-0.2.10-cp39-cp39-win_amd64.whl", hash = "sha256:9fb58645d3da0b91db384f8519b16edc7dc421c966ada8647756318915d63696"}, + {file = "asyncmy-0.2.10-pp310-pypy310_pp73-macosx_13_0_x86_64.whl", hash = "sha256:f10c977c60a95bd6ec6b8654e20c8f53bad566911562a7ad7117ca94618f05d3"}, + {file = "asyncmy-0.2.10-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:aab07fbdb9466beaffef136ffabe388f0d295d8d2adb8f62c272f1d4076515b9"}, + {file = "asyncmy-0.2.10-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:63144322ade68262201baae73ad0c8a06b98a3c6ae39d1f3f21c41cc5287066a"}, + {file = "asyncmy-0.2.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9659d95c6f2a611aec15bdd928950df937bf68bc4bbb68b809ee8924b6756067"}, + {file = "asyncmy-0.2.10-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8ced4bd938e95ede0fb9fa54755773df47bdb9f29f142512501e613dd95cf4a4"}, + {file = "asyncmy-0.2.10-pp38-pypy38_pp73-macosx_13_0_x86_64.whl", hash = "sha256:f76080d5d360635f0c67411fb3fb890d7a5a9e31135b4bb07c6a4e588287b671"}, + {file = "asyncmy-0.2.10-pp38-pypy38_pp73-macosx_14_0_arm64.whl", hash = "sha256:fde04da1a3e656ec7d7656b2d02ade87df9baf88cc1ebeff5d2288f856c086a4"}, + {file = "asyncmy-0.2.10-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:a83383cc6951bcde11c9cdda216a0849d29be2002a8fb6405ea6d9e5ced4ec69"}, + {file = "asyncmy-0.2.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58c3d8c12030c23df93929c8371da818211fa02c7b50cd178960c0a88e538adf"}, + {file = "asyncmy-0.2.10-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e0c8706ff7fc003775f3fc63804ea45be61e9ac9df9fd968977f781189d625ed"}, + {file = "asyncmy-0.2.10-pp39-pypy39_pp73-macosx_13_0_x86_64.whl", hash = "sha256:4651caaee6f4d7a8eb478a0dc460f8e91ab09a2d8d32444bc2b235544c791947"}, + {file = "asyncmy-0.2.10-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:ac091b327f01c38d91c697c810ba49e5f836890d48f6879ba0738040bb244290"}, + {file = "asyncmy-0.2.10-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:e1d2d9387cd3971297486c21098e035c620149c9033369491f58fe4fc08825b6"}, + {file = "asyncmy-0.2.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a760cb486ddb2c936711325236e6b9213564a9bb5deb2f6949dbd16c8e4d739e"}, + {file = "asyncmy-0.2.10-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1586f26633c05b16bcfc46d86e9875f4941280e12afa79a741cdf77ae4ccfb4d"}, + {file = "asyncmy-0.2.10.tar.gz", hash = "sha256:f4b67edadf7caa56bdaf1c2e6cf451150c0a86f5353744deabe4426fe27aff4e"}, +] + [[package]] name = "attrs" version = "24.3.0" @@ -5426,4 +5491,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "937a70cf9d203a461ea13b7c7cd974e7ad16a13ed3929af26f7ff16cdf44d03c" +content-hash = "bb37f955cf3e7a32bc453b054ad30ee6c745a6c43618c22d93dd23ae1b850fe7" diff --git a/pyproject.toml b/pyproject.toml index ee967f4bb..d5059afcf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,7 @@ prometheus-client = "^0.21.1" psycopg2-binary = "^2.9.10" aiosqlite = "^0.20.0" +asyncmy = "^0.2.10" [tool.poetry.group.dev.dependencies] pre-commit = "^3.0.4" pre-commit-hooks = "^4.4.0" From f19aa046baca398c0eabb032dd3203f61ec2d889 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 21:18:43 +0100 Subject: [PATCH 09/75] fix --- keep/api/core/db_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keep/api/core/db_utils.py b/keep/api/core/db_utils.py index 131ecb6ec..d1a5096f5 100644 --- a/keep/api/core/db_utils.py +++ b/keep/api/core/db_utils.py @@ -131,7 +131,7 @@ def asynchronize_connection_string(connection_string): We also may assume some customers hardcoded async drivers to the connection strings so we substitute sync drivers to async on the fly. """ - if type(connection_string) != str: + if type(connection_string) is not str: return connection_string if connection_string.startswith('sqlite:'): From 6ea1a775177a3f732c5b414df1cb1b198765bc46 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 21:26:33 +0100 Subject: [PATCH 10/75] Fix docs --- docs/mint.json | 2 + docs/overview/examples.mdx | 111 -------------------------- docs/providers/getting-started.mdx | 27 ------- docs/providers/what-is-a-provider.mdx | 11 --- docs/workflows/syntax/state.mdx | 3 - 5 files changed, 2 insertions(+), 152 deletions(-) delete mode 100644 docs/overview/examples.mdx delete mode 100644 docs/providers/getting-started.mdx delete mode 100644 docs/providers/what-is-a-provider.mdx delete mode 100644 docs/workflows/syntax/state.mdx diff --git a/docs/mint.json b/docs/mint.json index 6c2e0ff9e..27e42bd2d 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -210,6 +210,7 @@ { "group": "Deployment", "pages": [ + "deployment/getting-started", "deployment/configuration", "deployment/monitoring", { @@ -261,6 +262,7 @@ { "group": "Keep API", "pages": [ + "api-ref/root", { "group": "providers", "pages": [ diff --git a/docs/overview/examples.mdx b/docs/overview/examples.mdx deleted file mode 100644 index fc81bf413..000000000 --- a/docs/overview/examples.mdx +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: "Examples" ---- - -Got an interesting example of how would you use Keep? Feel free to submit a new example issue and we'll credit you when we add it! - - -## Create an incident only if the customer is on Enterprise tier -In this example we will utilize: - -1. Datadog for monitoring -2. OpsGenie for incident management -3. A postgres database that stores the customer tier. - -This example consists of two steps: -1. Connect your tools - Datadog, OpsGenie and Postgres. -2. Create a workflow that is triggered by the alert, runs an SQL query, and decides whether to create an incident. Once the workflow is created, you can upload it via the [Workflows](https://docs.keephq.dev/workflows/overview) page. -```yaml -alert: - id: enterprise-tier-alerts - description: Create an incident only if the customer is enterprise. - triggers: - - type: alert - filters: - - key: source - value: datadog - - key: name - value: YourAlertName - steps: - - name: check-if-customer-is-enterprise - provider: - type: postgres - config: "{{ providers.postgres-prod }}" - with: - # Keep will replace {{ alert.customer_id }} with the customer id - query: "SELECT customer_tier, customer_name FROM customers_table WHERE customer_id = {{ alert.customer_id }} LIMIT 1" - actions: - - name: opsgenie-incident - # trigger only if the customer is enterprise - condition: - - name: verify-true - type: assert - assert: "{{ steps.check-if-customer-is-enterprise.results[0] }} == 'enterprise'" - provider: - type: opsgenie - config: " {{ providers.opsgenie-prod }} " - with: - message: "A new alert on enterprise customer ( {{ steps.check-if-customer-is-enterprise.results[1] }} )" -``` - -## Send a slack message for every Cloudwatch alarm -1. Connect your Cloudwatch(/es) and Slack to Keep. -2. Create a simple Workflow that filters for CloudWatch events and sends a Slack message: -```yaml -workflow: - id: cloudwatch-slack - description: Send a slack message when a cloudwatch alarm is triggered - triggers: - - type: alert - filters: - - key: source - value: cloudwatch - actions: - - name: trigger-slack - provider: - type: slack - config: " {{ providers.slack-prod }} " - with: - message: "Got alarm from aws cloudwatch! {{ alert.name }}" - -``` - - -## Monitor a HTTP service -Suppose you want to monitor an HTTP service. -All you have to do is upload the following workflow: - -```yaml -workflow: - id: monitor-http-service - description: Monitor a HTTP service each 10 seconds - triggers: - - type: interval - value: 10 - steps: - - name: simple-http-request - provider: - type: http - with: - method: GET - url: 'https://YOUR_SERVICE_URL/' - timeout: 2 - verify: true - actions: - - name: trigger-slack - condition: - - name: assert-condition - type: assert - assert: '{{ steps.simple-http-request.results.status_code }} == 200' - provider: - type: slack - config: ' {{ providers.slack-prod }} ' - with: - message: "HTTP Request Status: {{ steps.simple-http-request.results.status_code }}\nHTTP Request Body: {{ steps.simple-http-request.results.body }}" - on-failure: - # Just need a provider we can use to send the failure reason - provider: - type: slack - config: ' {{ providers.slack-prod }} ' - -``` diff --git a/docs/providers/getting-started.mdx b/docs/providers/getting-started.mdx deleted file mode 100644 index fb10c800d..000000000 --- a/docs/providers/getting-started.mdx +++ /dev/null @@ -1,27 +0,0 @@ ---- -Title: "Providers" -sidebarTitle: "Getting Started" -description: "We tried our best to cover all common providers." ---- - -Click [here](https://github.com/keephq/keep/issues/new?assignees=&labels=feature,provider&template=feature_request.md&title=Missing%20PROVIDER_NAME) if you feel like we're missing some and we'll do our best to add them ASAP. - -Common providers include: - - - AWS, GCP, Azure, etc. - - - Sentry, New Relic, Datadog, etc. - - - PagerDuty, OpsGenie, etc. - - - Email, Slack, Discord, Microsoft Teams, etc. - - - MySQL, Postgresql etc - - - diff --git a/docs/providers/what-is-a-provider.mdx b/docs/providers/what-is-a-provider.mdx deleted file mode 100644 index 69a404580..000000000 --- a/docs/providers/what-is-a-provider.mdx +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "❓ What is a Provider" -sidebarTitle: "What is a Provider?" -description: "A Provider is a component of Keep that enables it to interact with third-party products. It is implemented as extensible Python code, making it easy to enhance and customize." ---- - -Providers are core components of Keep that allow Keep to either query data or send notifications to products such as Datadog, Cloudwatch, and Sentry for data querying, and Slack, Email, and PagerDuty for sending notifications about alerts. - -By leveraging Keep Providers, developers are able to integrate Keep with the tools they use and trust, providing them with a flexible and powerful way to manage their alerts. - -![](/images/providers.png) diff --git a/docs/workflows/syntax/state.mdx b/docs/workflows/syntax/state.mdx deleted file mode 100644 index d089c612c..000000000 --- a/docs/workflows/syntax/state.mdx +++ /dev/null @@ -1,3 +0,0 @@ ---- -title: "State" ---- From 8e2701e9f259bce75ccf5390c9e4c55f83e1054f Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 21:31:31 +0100 Subject: [PATCH 11/75] fix tests --- keep/api/routes/workflows.py | 9 +++++---- keep/api/tasks/process_event_task.py | 5 +++-- tests/test_workflow_execution.py | 12 ++++++------ tests/test_workflowmanager.py | 17 +++++++++-------- 4 files changed, 23 insertions(+), 20 deletions(-) diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py index 34f409d1d..abcc7f969 100644 --- a/keep/api/routes/workflows.py +++ b/keep/api/routes/workflows.py @@ -1,3 +1,4 @@ +import asyncio import datetime import logging import os @@ -255,9 +256,9 @@ async def run_workflow_from_definition( workflowstore = WorkflowStore() workflowmanager = WorkflowManager.get_instance() try: - workflow = workflowstore.get_workflow_from_dict( + workflow = asyncio.run(workflowstore.get_workflow_from_dict( tenant_id=tenant_id, workflow=workflow - ) + )) except Exception as e: logger.exception( "Failed to parse workflow", @@ -269,9 +270,9 @@ async def run_workflow_from_definition( ) try: - workflow_execution = workflowmanager.scheduler.handle_workflow_test( + workflow_execution = asyncio.run(workflowmanager.scheduler.handle_workflow_test( workflow, tenant_id, created_by - ) + )) except Exception as e: logger.exception( "Failed to run test workflow", diff --git a/keep/api/tasks/process_event_task.py b/keep/api/tasks/process_event_task.py index 09415a90d..6c7f970a6 100644 --- a/keep/api/tasks/process_event_task.py +++ b/keep/api/tasks/process_event_task.py @@ -1,4 +1,5 @@ # builtins +import asyncio import copy import datetime import json @@ -425,7 +426,7 @@ def __handle_formatted_events( workflow_manager = WorkflowManager.get_instance() # insert the events to the workflow manager process queue logger.info("Adding events to the workflow manager queue") - workflow_manager.insert_events(tenant_id, enriched_formatted_events) + asyncio.run(workflow_manager.insert_events(tenant_id, enriched_formatted_events)) logger.info("Added events to the workflow manager queue") except Exception: logger.exception( @@ -452,7 +453,7 @@ def __handle_formatted_events( # if new grouped incidents were created, we need to push them to the client # if incidents: # logger.info("Adding group alerts to the workflow manager queue") - # workflow_manager.insert_events(tenant_id, grouped_alerts) + # asyncio.run(workflow_manager.insert_events(tenant_id, grouped_alerts)) # logger.info("Added group alerts to the workflow manager queue") except Exception: logger.exception( diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index db801943f..aadd94075 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -251,7 +251,7 @@ def test_workflow_execution( ) # Insert the current alert into the workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) # Wait for the workflow execution to complete workflow_execution = None @@ -432,7 +432,7 @@ def test_workflow_execution_2( ) # Insert the current alert into the workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) assert len(workflow_manager.scheduler.workflows_to_run) == 1 # Wait for the workflow execution to complete @@ -554,7 +554,7 @@ def test_workflow_execution3( time.sleep(1) # Insert the current alert into the workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) # Wait for the workflow execution to complete workflow_execution = None @@ -670,7 +670,7 @@ def test_workflow_execution_with_disabled_workflow( # Sleep one second to avoid the case where tier0 alerts are not triggered time.sleep(1) - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) enabled_workflow_execution = None disabled_workflow_execution = None @@ -925,7 +925,7 @@ def test_workflow_execution_logs( ) # Insert the current alert into the workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) # Wait for the workflow execution to complete workflow_execution = None @@ -1004,7 +1004,7 @@ def test_workflow_execution_logs_log_level_debug_console_provider( ) # Insert the current alert into the workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) # Wait for the workflow execution to complete workflow_execution = None diff --git a/tests/test_workflowmanager.py b/tests/test_workflowmanager.py index 2b4868146..d67635906 100644 --- a/tests/test_workflowmanager.py +++ b/tests/test_workflowmanager.py @@ -1,3 +1,4 @@ +import asyncio import pytest from unittest.mock import Mock, patch from fastapi import HTTPException @@ -25,9 +26,9 @@ def test_get_workflow_from_dict(): tenant_id = "test_tenant" workflow_path = str(path_to_test_resources / "db_disk_space_for_testing.yml") workflow_dict = workflow_store._parse_workflow_to_dict(workflow_path=workflow_path) - result = workflow_store.get_workflow_from_dict( + result = asyncio.run(workflow_store.get_workflow_from_dict( tenant_id=tenant_id, workflow=workflow_dict - ) + )) mock_parser.parse.assert_called_once_with(tenant_id, workflow_dict) assert result.id == "workflow1" @@ -44,9 +45,9 @@ def test_get_workflow_from_dict_raises_exception(): workflow_dict = workflow_store._parse_workflow_to_dict(workflow_path=workflow_path) with pytest.raises(HTTPException) as exc_info: - workflow_store.get_workflow_from_dict( + asyncio.run(workflow_store.get_workflow_from_dict( tenant_id=tenant_id, workflow=workflow_dict - ) + )) assert exc_info.value.status_code == 500 assert exc_info.value.detail == "Unable to parse workflow from dict" @@ -111,11 +112,11 @@ def test_handle_workflow_test(): with patch.object(threading, "Thread", wraps=threading.Thread) as mock_thread: with patch.object(queue, "Queue", wraps=queue.Queue) as mock_queue: - result = workflow_scheduler.handle_workflow_test( + result = asyncio.run(workflow_scheduler.handle_workflow_test( workflow=mock_workflow, tenant_id=tenant_id, triggered_by_user=triggered_by_user, - ) + )) mock_workflow_manager._run_workflow.assert_called_once_with( mock_workflow, 123, True @@ -154,11 +155,11 @@ def test_handle_workflow_test_with_error(): with patch.object(threading, "Thread", wraps=threading.Thread) as mock_thread: with patch.object(queue, "Queue", wraps=queue.Queue) as mock_queue: - result = workflow_scheduler.handle_workflow_test( + result = asyncio.run(workflow_scheduler.handle_workflow_test( workflow=mock_workflow, tenant_id=tenant_id, triggered_by_user=triggered_by_user, - ) + )) mock_workflow_manager._run_workflow.assert_called_once_with( mock_workflow, 123, True From 995d1e916706783d41c20be724d87c7ce13e2806 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Sun, 22 Dec 2024 22:33:40 +0100 Subject: [PATCH 12/75] Tests --- keep/api/tasks/process_event_task.py | 3 +- poetry.lock | 2 +- pyproject.toml | 1 + tests/test_workflow_execution.py | 45 ++++++++++++++-------------- 4 files changed, 26 insertions(+), 25 deletions(-) diff --git a/keep/api/tasks/process_event_task.py b/keep/api/tasks/process_event_task.py index 6c7f970a6..5c3db63c7 100644 --- a/keep/api/tasks/process_event_task.py +++ b/keep/api/tasks/process_event_task.py @@ -426,7 +426,8 @@ def __handle_formatted_events( workflow_manager = WorkflowManager.get_instance() # insert the events to the workflow manager process queue logger.info("Adding events to the workflow manager queue") - asyncio.run(workflow_manager.insert_events(tenant_id, enriched_formatted_events)) + loop = asyncio.get_event_loop() + loop.run(workflow_manager.insert_events(tenant_id, enriched_formatted_events)) logger.info("Added events to the workflow manager queue") except Exception: logger.exception( diff --git a/poetry.lock b/poetry.lock index 4b0c38eb5..ec2516964 100644 --- a/poetry.lock +++ b/poetry.lock @@ -5491,4 +5491,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "bb37f955cf3e7a32bc453b054ad30ee6c745a6c43618c22d93dd23ae1b850fe7" +content-hash = "2ef4525b13d55ac197afa2f6537d8ab9e0589eb756602540631e1f1a9e074c7f" diff --git a/pyproject.toml b/pyproject.toml index d5059afcf..6f3ebd716 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,6 +91,7 @@ psycopg2-binary = "^2.9.10" aiosqlite = "^0.20.0" asyncmy = "^0.2.10" +pytest-asyncio = "^0.25.0" [tool.poetry.group.dev.dependencies] pre-commit = "^3.0.4" pre-commit-hooks = "^4.4.0" diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index aadd94075..221703dfb 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -1,5 +1,6 @@ import asyncio import logging +import threading import time from collections import defaultdict from datetime import datetime, timedelta @@ -74,7 +75,6 @@ Alert details: {{ alert }}" """ - @pytest.fixture(scope="module") def workflow_manager(): """ @@ -82,10 +82,7 @@ def workflow_manager(): It starts the manager asynchronously and stops it after all tests are completed. """ manager = WorkflowManager.get_instance() - asyncio.run(manager.start()) - while not manager.started: - time.sleep(0.1) - yield manager + yield manager manager.stop() @@ -258,9 +255,9 @@ def test_workflow_execution( count = 0 status = None while workflow_execution is None and count < 30 and status != "success": - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-time-check" - ) + )) if workflow_execution is not None: status = workflow_execution.status time.sleep(1) @@ -440,10 +437,10 @@ def test_workflow_execution_2( count = 0 status = None while workflow_execution is None and count < 30 and status != "success": - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, workflow_id, - ) + )) if workflow_execution is not None: status = workflow_execution.status time.sleep(1) @@ -511,7 +508,8 @@ def test_workflow_execution_2( ], indirect=["test_app", "db_session"], ) -def test_workflow_execution3( +@pytest.mark.asyncio +async def test_workflow_execution3( db_session, test_app, create_alert, @@ -554,19 +552,20 @@ def test_workflow_execution3( time.sleep(1) # Insert the current alert into the workflow manager - asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) + await workflow_manager.start() + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for the workflow execution to complete workflow_execution = None count = 0 status = None while workflow_execution is None and count < 30 and status != "success": - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-first-time" ) if workflow_execution is not None: status = workflow_execution.status - time.sleep(1) + await asyncio.sleep(1) count += 1 # Check if the workflow execution was successful @@ -679,12 +678,12 @@ def test_workflow_execution_with_disabled_workflow( while ( enabled_workflow_execution is None and disabled_workflow_execution is None ) and count < 30: - enabled_workflow_execution = get_last_workflow_execution_by_workflow_id( + enabled_workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, enabled_id - ) - disabled_workflow_execution = get_last_workflow_execution_by_workflow_id( + )) + disabled_workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, disabled_id - ) + )) time.sleep(1) count += 1 @@ -782,9 +781,9 @@ def wait_workflow_execution(workflow_id): count = 0 status = None while workflow_execution is None and count < 30 and status != "success": - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, workflow_id - ) + )) if workflow_execution is not None: status = workflow_execution.status time.sleep(1) @@ -932,9 +931,9 @@ def test_workflow_execution_logs( count = 0 status = None while workflow_execution is None and count < 30 and status != "success": - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "susu-and-sons" - ) + )) if workflow_execution is not None: status = workflow_execution.status time.sleep(1) @@ -1012,9 +1011,9 @@ def test_workflow_execution_logs_log_level_debug_console_provider( status = None time.sleep(1) while workflow_execution is None and count < 30 and status != "success": - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "susu-and-sons" - ) + )) if workflow_execution is not None: status = workflow_execution.status time.sleep(1) From dcd7216a4d21d64acdf74d93400689b06df1e1c1 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 15:22:08 +0100 Subject: [PATCH 13/75] So... --- keep/api/core/db_utils.py | 5 +++-- tests/conftest.py | 17 ++++++++++++++--- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/keep/api/core/db_utils.py b/keep/api/core/db_utils.py index d1a5096f5..e4dd8c482 100644 --- a/keep/api/core/db_utils.py +++ b/keep/api/core/db_utils.py @@ -136,14 +136,15 @@ def asynchronize_connection_string(connection_string): if connection_string.startswith('sqlite:'): connection_string = connection_string.replace('sqlite:', 'sqlite+aiosqlite:', 1) - logging.warning(f"DB connection string updated to: {connection_string}") + logging.error(f"DB connection string updated to: {connection_string} to support async.") if connection_string.startswith('postgresql+psycopg2:'): connection_string = connection_string.replace('postgresql+psycopg2:', 'postgresql+psycopg:', 1) - logging.warning(f"DB connection string updated to: {connection_string}") + logging.error(f"DB connection string updated to: {connection_string} to support async.") if connection_string.startswith('mysql+pymysql:'): connection_string = connection_string.replace('mysql+pymysql:', 'mysql+asyncmy:', 1) + logging.error(f"DB connection string updated to: {connection_string} to support async.") return connection_string diff --git a/tests/conftest.py b/tests/conftest.py index c3c831948..141ba508d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import asyncio import inspect import os import random @@ -15,9 +16,11 @@ from sqlalchemy.orm import sessionmaker from sqlalchemy.pool import StaticPool from sqlmodel import Session, SQLModel, create_engine +from sqlalchemy.ext.asyncio import create_async_engine from starlette_context import context, request_cycle_context # This import is required to create the tables +from keep.api.core.db_utils import asynchronize_connection_string from keep.api.core.dependencies import SINGLE_TENANT_UUID from keep.api.core.elastic import ElasticClient from keep.api.models.db.alert import * @@ -221,14 +224,16 @@ def db_session(request, monkeypatch): ) t.append_constraint(status_index) mock_engine = create_engine(db_connection_string) + mock_engine_async = create_async_engine(asynchronize_connection_string(db_connection_string)) # sqlite else: - db_connection_string = "sqlite:///:memory:" + db_connection_string = "sqlite:///file:shared_memory?mode=memory&cache=shared&uri=true" mock_engine = create_engine( db_connection_string, connect_args={"check_same_thread": False}, poolclass=StaticPool, ) + mock_engine_async = create_async_engine(asynchronize_connection_string(db_connection_string)) # @tb: leaving this here if anybody else gets to problem with nested transactions # https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#serializable-isolation-savepoints-transactional-ddl @@ -314,9 +319,15 @@ def do_begin(conn): session.add_all(workflow_data) session.commit() + def mock_create_engine(_async=False): + if _async: + return mock_engine_async + return mock_engine + with patch("keep.api.core.db.engine", mock_engine): - with patch("keep.api.core.db_utils.create_db_engine", return_value=mock_engine): - yield session + with patch("keep.api.core.db.engine_async", mock_engine_async): + with patch("keep.api.core.db_utils.create_db_engine", side_effect=mock_create_engine): + yield session import logging From ce6f807aa9559ca7eb76ab7b7ffda6eeae6e8dcb Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 16:17:52 +0100 Subject: [PATCH 14/75] Fix tests --- keep/api/core/db.py | 11 +++++------ keep/api/routes/workflows.py | 2 +- keep/contextmanager/contextmanager.py | 2 +- keep/step/step.py | 6 ++++-- keep/workflowmanager/workflowstore.py | 3 ++- tests/test_contextmanager.py | 6 +++--- tests/test_parser.py | 23 ++++++++++++----------- tests/test_steps.py | 12 ++++++------ 8 files changed, 34 insertions(+), 31 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 191ec8448..74e04c0e0 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -483,7 +483,7 @@ def get_last_workflow_workflow_to_alert_executions( async def get_last_workflow_execution_by_workflow_id( - tenant_id: str, workflow_id: str + tenant_id: str, workflow_id: str, status: str = None ) -> Optional[WorkflowExecution]: async with AsyncSession(engine_async) as session: q = select(WorkflowExecution).filter( @@ -493,14 +493,13 @@ async def get_last_workflow_execution_by_workflow_id( ).filter(WorkflowExecution.status == "success").order_by( WorkflowExecution.started.desc() ) - workflow_execution = ( - (await session.exec(q)).first() - ) if status: - query = query.filter(WorkflowExecution.status == status) + q = q.filter(WorkflowExecution.status == status) - workflow_execution = query.first() + workflow_execution = ( + (await session.exec(q)).first() + ) return workflow_execution diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py index abcc7f969..4602f7951 100644 --- a/keep/api/routes/workflows.py +++ b/keep/api/routes/workflows.py @@ -479,7 +479,7 @@ async def update_workflow_by_id( """ tenant_id = authenticated_entity.tenant_id logger.info(f"Updating workflow {workflow_id}", extra={"tenant_id": tenant_id}) - workflow_from_db = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id) + workflow_from_db = await get_workflow(tenant_id=tenant_id, workflow_id=workflow_id) if not workflow_from_db: logger.warning( f"Tenant tried to update workflow {workflow_id} that does not exist", diff --git a/keep/contextmanager/contextmanager.py b/keep/contextmanager/contextmanager.py index a4a466582..a6cf1c337 100644 --- a/keep/contextmanager/contextmanager.py +++ b/keep/contextmanager/contextmanager.py @@ -259,7 +259,7 @@ def set_step_vars(self, step_id, _vars): self.steps_context[step_id]["vars"] = _vars async def get_last_workflow_run(self, workflow_id): - return get_last_workflow_execution_by_workflow_id(self.tenant_id, workflow_id) + return await get_last_workflow_execution_by_workflow_id(self.tenant_id, workflow_id) def dump(self): self.logger.info("Dumping logs to db") diff --git a/keep/step/step.py b/keep/step/step.py index 004d5ebc7..c45199486 100644 --- a/keep/step/step.py +++ b/keep/step/step.py @@ -286,8 +286,10 @@ async def _run_single(self): self.context_manager.set_step_provider_paremeters( self.step_id, rendered_providers_parameters ) - except Exception as e: - raise StepError(e) + except KeyError: + pass + # except Exception as e: + # raise StepError(e) return True diff --git a/keep/workflowmanager/workflowstore.py b/keep/workflowmanager/workflowstore.py index e75884a1a..48974492e 100644 --- a/keep/workflowmanager/workflowstore.py +++ b/keep/workflowmanager/workflowstore.py @@ -1,3 +1,4 @@ +import asyncio import io import logging import os @@ -63,7 +64,7 @@ def create_workflow(self, tenant_id: str, created_by, workflow: dict): def delete_workflow(self, tenant_id, workflow_id): self.logger.info(f"Deleting workflow {workflow_id}") - workflow = get_workflow(tenant_id, workflow_id) + workflow = asyncio.run(get_workflow(tenant_id, workflow_id)) if not workflow: raise HTTPException( status_code=404, detail=f"Workflow {workflow_id} not found" diff --git a/tests/test_contextmanager.py b/tests/test_contextmanager.py index fad627e5d..4691a8556 100644 --- a/tests/test_contextmanager.py +++ b/tests/test_contextmanager.py @@ -180,15 +180,15 @@ def test_context_manager_set_step_context(context_manager: ContextManager): assert context_manager.steps_context["this"]["results"] == results assert context_manager.steps_context[step_id]["results"] == results - -def test_context_manager_get_last_alert_run( +@pytest.mark.asyncio +async def test_context_manager_get_last_alert_run( context_manager_with_state: ContextManager, db_session ): workflow_id = "test-id-1" alert_context = {"mock": "mock"} alert_status = "firing" context_manager_with_state.tenant_id = SINGLE_TENANT_UUID - last_run = context_manager_with_state.get_last_workflow_run(workflow_id) + last_run = await context_manager_with_state.get_last_workflow_run(workflow_id) if last_run is None: pytest.fail("No workflow run found with the given workflow_id") assert last_run == WorkflowExecution( diff --git a/tests/test_parser.py b/tests/test_parser.py index 6e607fe37..dba1a5eeb 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -19,16 +19,16 @@ from keep.step.step import Step from keep.workflowmanager.workflowstore import WorkflowStore - -def test_parse_with_nonexistent_file(db_session): +@pytest.mark.asyncio +async def test_parse_with_nonexistent_file(db_session): workflow_store = WorkflowStore() # Expected error when a given input does not describe an existing file with pytest.raises(HTTPException) as e: - workflow_store.get_workflow(SINGLE_TENANT_UUID, "test-not-found") + await workflow_store.get_workflow(SINGLE_TENANT_UUID, "test-not-found") assert e.value.status_code == 404 - -def test_parse_with_nonexistent_url(monkeypatch): +@pytest.mark.asyncio +async def test_parse_with_nonexistent_url(monkeypatch): # Mocking requests.get to always raise a ConnectionError def mock_get(*args, **kwargs): raise requests.exceptions.ConnectionError @@ -37,7 +37,7 @@ def mock_get(*args, **kwargs): workflow_store = WorkflowStore() # Expected error when a given input does not describe an existing URL with pytest.raises(requests.exceptions.ConnectionError): - workflow_store.get_workflows_from_path( + await workflow_store.get_workflows_from_path( SINGLE_TENANT_UUID, "https://ThisWebsiteDoNotExist.com" ) @@ -46,10 +46,10 @@ def mock_get(*args, **kwargs): workflow_path = str(path_to_test_resources / "db_disk_space_for_testing.yml") providers_path = str(path_to_test_resources / "providers_for_testing.yaml") - -def test_parse_sanity_check(db_session): +@pytest.mark.asyncio +async def test_parse_sanity_check(db_session): workflow_store = WorkflowStore() - parsed_workflows = workflow_store.get_workflows_from_path( + parsed_workflows = await workflow_store.get_workflows_from_path( SINGLE_TENANT_UUID, workflow_path, providers_path ) assert parsed_workflows is not None @@ -302,9 +302,10 @@ def test_parse_alert_steps(self): class TestReusableActionWithWorkflow: - def test_if_action_is_expanded(self, db_session): + @pytest.mark.asyncio + async def test_if_action_is_expanded(self, db_session): workflow_store = WorkflowStore() - workflows = workflow_store.get_workflows_from_path( + workflows = await workflow_store.get_workflows_from_path( tenant_id=SINGLE_TENANT_UUID, workflow_path=reusable_workflow_path, providers_file=reusable_providers_path, diff --git a/tests/test_steps.py b/tests/test_steps.py index 4009680da..2199553ad 100644 --- a/tests/test_steps.py +++ b/tests/test_steps.py @@ -42,21 +42,21 @@ def sample_step(): return step - -def test_run_single(sample_step): +@pytest.mark.asyncio +async def test_run_single(sample_step): # Simulate the result sample_step.provider.query = Mock(return_value="result") # Run the method - result = sample_step._run_single() + result = await sample_step._run_single() # Assertions assert result is True # Action should run successfully sample_step.provider.query.assert_called_with(param1="value1", param2="value2") assert sample_step.provider.query.call_count == 1 - -def test_run_single_exception(sample_step): +@pytest.mark.asyncio +async def test_run_single_exception(sample_step): # Simulate an exception sample_step.provider.query = Mock(side_effect=Exception("Test exception")) @@ -64,7 +64,7 @@ def test_run_single_exception(sample_step): # Run the method and expect an exception to be raised with pytest.raises(StepError): - sample_step._run_single() + await sample_step._run_single() end_time = time.time() execution_time = end_time - start_time From 077fb52141ae762effe06481c55d05f2277abd53 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 18:56:29 +0100 Subject: [PATCH 15/75] asyncio_default_fixture_loop_scope --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 0c6c50291..56a3d3acd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,9 @@ description = "Alerting. for developers, by developers." authors = ["Keep Alerting LTD"] packages = [{include = "keep"}] +[tool.pytest.ini_options] +asyncio_default_fixture_loop_scope = "function" + [tool.poetry.dependencies] python = ">=3.11,<3.12" click = "^8.1.3" From 84028ed8dcf493357e1d4d9fe569792f0df9a800 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 19:11:12 +0100 Subject: [PATCH 16/75] Fix --- tests/test_workflow_execution.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index cf7b8b039..76a8d9c7b 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -1273,9 +1273,9 @@ def test_alert_routing_policy( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-routing-policy" - ) + )) if workflow_execution is not None and workflow_execution.status == "success": break time.sleep(1) @@ -1453,9 +1453,9 @@ def test_nested_conditional_flow( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = get_last_workflow_execution_by_workflow_id( + workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "nested-conditional-flow" - ) + )) if workflow_execution is not None and workflow_execution.status == "success": break From 5810e9c1e9cdf465e68c86996ca0576790c33050 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 19:28:04 +0100 Subject: [PATCH 17/75] Session... --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 56a3d3acd..ed192ee3e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Keep Alerting LTD"] packages = [{include = "keep"}] [tool.pytest.ini_options] -asyncio_default_fixture_loop_scope = "function" +asyncio_default_fixture_loop_scope = "session" [tool.poetry.dependencies] python = ">=3.11,<3.12" From e1bac08f9ee718bddb677fda3befb2814df5ee70 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 20:02:23 +0100 Subject: [PATCH 18/75] Fix --- keep/step/step.py | 6 ++---- tests/test_steps.py | 12 ++++++++++-- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/keep/step/step.py b/keep/step/step.py index c45199486..004d5ebc7 100644 --- a/keep/step/step.py +++ b/keep/step/step.py @@ -286,10 +286,8 @@ async def _run_single(self): self.context_manager.set_step_provider_paremeters( self.step_id, rendered_providers_parameters ) - except KeyError: - pass - # except Exception as e: - # raise StepError(e) + except Exception as e: + raise StepError(e) return True diff --git a/tests/test_steps.py b/tests/test_steps.py index 2199553ad..3fe508390 100644 --- a/tests/test_steps.py +++ b/tests/test_steps.py @@ -45,7 +45,11 @@ def sample_step(): @pytest.mark.asyncio async def test_run_single(sample_step): # Simulate the result - sample_step.provider.query = Mock(return_value="result") + + async def result(*args, **kwargs): + return "result" + + sample_step.provider.query = Mock(side_effect=result) # Run the method result = await sample_step._run_single() @@ -57,8 +61,12 @@ async def test_run_single(sample_step): @pytest.mark.asyncio async def test_run_single_exception(sample_step): + + async def result(*args, **kwargs): + raise Exception("Test exception") + # Simulate an exception - sample_step.provider.query = Mock(side_effect=Exception("Test exception")) + sample_step.provider.query = Mock(side_effect=result) start_time = time.time() From 956d5f8e911cf6b6cee8bbbaafd3427439ac5bcc Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 20:30:16 +0100 Subject: [PATCH 19/75] @pytest.mark.asyncio --- tests/test_enrichments.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/test_enrichments.py b/tests/test_enrichments.py index 2dd9c873d..edbc10d01 100644 --- a/tests/test_enrichments.py +++ b/tests/test_enrichments.py @@ -52,6 +52,7 @@ def mock_alert_dto(): ) +@pytest.mark.asyncio def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto): # Assuming there are no extraction rules mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = ( @@ -65,6 +66,7 @@ def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto): assert result_event == mock_alert_dto # Assuming no change if no rules +@pytest.mark.asyncio def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto): # Setup an extraction rule that should apply based on the alert content rule = ExtractionRule( @@ -92,6 +94,7 @@ def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto): assert enriched_event.alert_type == "Alert" +@pytest.mark.asyncio def test_run_extraction_rules_event_is_dict(mock_session): event = {"name": "Test Alert", "source": ["source_test"]} rule = ExtractionRule( @@ -118,6 +121,7 @@ def test_run_extraction_rules_event_is_dict(mock_session): ) # Ensuring the attribute is correctly processed +@pytest.mark.asyncio def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = ( [] @@ -131,6 +135,7 @@ def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto): ) # Should return the original event if no rules apply +@pytest.mark.asyncio def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto): rule = ExtractionRule( id=1, @@ -155,6 +160,7 @@ def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto ) # Assuming the code does not modify the event if attribute is not in template format +@pytest.mark.asyncio def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto): rule = ExtractionRule( id=1, @@ -177,6 +183,7 @@ def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto assert enriched_event == mock_alert_dto # Check if event is unchanged +@pytest.mark.asyncio def test_run_extraction_rules_handle_source_special_case(mock_session): event = {"name": "Test Alert", "source": "incorrect_format"} rule = ExtractionRule( @@ -212,6 +219,7 @@ def test_run_extraction_rules_handle_source_special_case(mock_session): #### 2. Testing `run_extraction_rules` with CEL Conditions +@pytest.mark.asyncio def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto): rule = ExtractionRule( id=2, @@ -244,6 +252,7 @@ def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto): assert enriched_event.source_name == "test_source" +@pytest.mark.asyncio def test_run_mapping_rules_applies(mock_session, mock_alert_dto): # Setup a mapping rule rule = MappingRule( @@ -267,6 +276,7 @@ def test_run_mapping_rules_applies(mock_session, mock_alert_dto): assert mock_alert_dto.service == "new_service" +@pytest.mark.asyncio def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto): rule = MappingRule( id=1, @@ -311,6 +321,7 @@ def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto): ), "Service should not match any entry" +@pytest.mark.asyncio def test_run_mapping_rules_no_match(mock_session, mock_alert_dto): rule = MappingRule( id=1, @@ -339,6 +350,7 @@ def test_run_mapping_rules_no_match(mock_session, mock_alert_dto): ), "Service should not match any entry" +@pytest.mark.asyncio def test_check_matcher_with_and_condition(mock_session, mock_alert_dto): # Setup a mapping rule with && condition in matchers rule = MappingRule( @@ -376,6 +388,7 @@ def test_check_matcher_with_and_condition(mock_session, mock_alert_dto): assert result is False +@pytest.mark.asyncio def test_check_matcher_with_or_condition(mock_session, mock_alert_dto): # Setup a mapping rule with || condition in matchers rule = MappingRule( @@ -428,6 +441,7 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto): ], indirect=True, ) +@pytest.mark.asyncio def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts): import os @@ -459,6 +473,7 @@ def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts): @pytest.mark.parametrize("test_app", ["NO_AUTH"], indirect=True) +@pytest.mark.asyncio def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client): # add some rule rule = MappingRule( @@ -497,6 +512,7 @@ def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client @pytest.mark.parametrize("test_app", ["NO_AUTH"], indirect=True) +@pytest.mark.asyncio def test_disposable_enrichment(db_session, client, test_app, mock_alert_dto): # SHAHAR: there is a voodoo so that you must do something with the db_session to kick it off rule = MappingRule( @@ -585,6 +601,7 @@ def test_disposable_enrichment(db_session, client, test_app, mock_alert_dto): assert alert["status"] == "firing" +@pytest.mark.asyncio def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto): # Mock a TopologyService with dependencies to simulate the DB structure mock_topology_service = TopologyService( @@ -644,6 +661,7 @@ def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto): ) +@pytest.mark.asyncio def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto): # Setup a mapping rule with complex matchers rule = MappingRule( @@ -702,6 +720,7 @@ def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto): assert not hasattr(mock_alert_dto, "service") +@pytest.mark.asyncio def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto): # Setup a mapping rule with complex matchers and multiple enrichment fields rule = MappingRule( From 3db5d519794df0619e49dbbfbdf3dacbf7f2c3a8 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 20:55:57 +0100 Subject: [PATCH 20/75] function --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ed192ee3e..56a3d3acd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Keep Alerting LTD"] packages = [{include = "keep"}] [tool.pytest.ini_options] -asyncio_default_fixture_loop_scope = "session" +asyncio_default_fixture_loop_scope = "function" [tool.poetry.dependencies] python = ">=3.11,<3.12" From 881e5cf6e887415d5aa6103b69e8233343d63156 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 21:03:35 +0100 Subject: [PATCH 21/75] mock_session.db_session = db_session --- tests/test_enrichments.py | 51 ++++++++++++++++++++++++++------------- 1 file changed, 34 insertions(+), 17 deletions(-) diff --git a/tests/test_enrichments.py b/tests/test_enrichments.py index edbc10d01..a8bdac258 100644 --- a/tests/test_enrichments.py +++ b/tests/test_enrichments.py @@ -53,11 +53,12 @@ def mock_alert_dto(): @pytest.mark.asyncio -def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto): +async def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto, db_session): # Assuming there are no extraction rules mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = ( [] ) + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) result_event = enrichment_bl.run_extraction_rules(mock_alert_dto) @@ -67,7 +68,7 @@ def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto): +def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto, db_session): # Setup an extraction rule that should apply based on the alert content rule = ExtractionRule( id=1, @@ -82,6 +83,7 @@ def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -95,7 +97,7 @@ def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_extraction_rules_event_is_dict(mock_session): +def test_run_extraction_rules_event_is_dict(mock_session, db_session): event = {"name": "Test Alert", "source": ["source_test"]} rule = ExtractionRule( id=1, @@ -109,6 +111,7 @@ def test_run_extraction_rules_event_is_dict(mock_session): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -122,10 +125,11 @@ def test_run_extraction_rules_event_is_dict(mock_session): @pytest.mark.asyncio -def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto): +def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto, db_session): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = ( [] ) + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) result_event = enrichment_bl.run_extraction_rules(mock_alert_dto) @@ -136,7 +140,7 @@ def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto): +def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto, db_session): rule = ExtractionRule( id=1, tenant_id="test_tenant", @@ -149,6 +153,7 @@ def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -161,7 +166,7 @@ def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto @pytest.mark.asyncio -def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto): +def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto, db_session): rule = ExtractionRule( id=1, tenant_id="test_tenant", @@ -174,6 +179,7 @@ def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -184,7 +190,7 @@ def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto @pytest.mark.asyncio -def test_run_extraction_rules_handle_source_special_case(mock_session): +def test_run_extraction_rules_handle_source_special_case(mock_session, db_session): event = {"name": "Test Alert", "source": "incorrect_format"} rule = ExtractionRule( id=1, @@ -198,6 +204,7 @@ def test_run_extraction_rules_handle_source_special_case(mock_session): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -220,7 +227,7 @@ def test_run_extraction_rules_handle_source_special_case(mock_session): @pytest.mark.asyncio -def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto): +def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto, db_session): rule = ExtractionRule( id=2, tenant_id="test_tenant", @@ -234,6 +241,7 @@ def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session # Mocking the CEL environment to return True for the condition with patch("chevron.render", return_value="test_source"), patch( @@ -253,7 +261,7 @@ def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_mapping_rules_applies(mock_session, mock_alert_dto): +def test_run_mapping_rules_applies(mock_session, mock_alert_dto, db_session): # Setup a mapping rule rule = MappingRule( id=1, @@ -267,6 +275,7 @@ def test_run_mapping_rules_applies(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -277,7 +286,7 @@ def test_run_mapping_rules_applies(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto): +def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto, db_session): rule = MappingRule( id=1, tenant_id="test_tenant", @@ -293,6 +302,7 @@ def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -322,7 +332,7 @@ def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_mapping_rules_no_match(mock_session, mock_alert_dto): +def test_run_mapping_rules_no_match(mock_session, mock_alert_dto, db_session): rule = MappingRule( id=1, tenant_id="test_tenant", @@ -338,6 +348,7 @@ def test_run_mapping_rules_no_match(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session del mock_alert_dto.service enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -351,7 +362,7 @@ def test_run_mapping_rules_no_match(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_check_matcher_with_and_condition(mock_session, mock_alert_dto): +def test_check_matcher_with_and_condition(mock_session, mock_alert_dto, db_session): # Setup a mapping rule with && condition in matchers rule = MappingRule( id=1, @@ -365,6 +376,7 @@ def test_check_matcher_with_and_condition(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -389,7 +401,7 @@ def test_check_matcher_with_and_condition(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_check_matcher_with_or_condition(mock_session, mock_alert_dto): +def test_check_matcher_with_or_condition(mock_session, mock_alert_dto, db_session): # Setup a mapping rule with || condition in matchers rule = MappingRule( id=1, @@ -406,6 +418,7 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -442,7 +455,7 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto): indirect=True, ) @pytest.mark.asyncio -def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts): +def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts, db_session): import os # first, use elastic @@ -463,6 +476,7 @@ def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id=SINGLE_TENANT_UUID, db=mock_session) @@ -602,7 +616,7 @@ def test_disposable_enrichment(db_session, client, test_app, mock_alert_dto): @pytest.mark.asyncio -def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto): +def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto, db_session): # Mock a TopologyService with dependencies to simulate the DB structure mock_topology_service = TopologyService( id=1, tenant_id="keep", service="test-service", display_name="Test Service" @@ -621,6 +635,7 @@ def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto): # Mock the session to return this topology mapping rule mock_session.query.return_value.filter.return_value.all.return_value = [rule] + mock_session.db_session = db_session # Initialize the EnrichmentsBl class with the mock session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -662,7 +677,7 @@ def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto): +def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto, db_session): # Setup a mapping rule with complex matchers rule = MappingRule( id=1, @@ -688,6 +703,7 @@ def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) @@ -721,7 +737,7 @@ def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto): @pytest.mark.asyncio -def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto): +def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto, db_session): # Setup a mapping rule with complex matchers and multiple enrichment fields rule = MappingRule( id=1, @@ -743,6 +759,7 @@ def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto): mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [ rule ] + mock_session.db_session = db_session enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session) From 87ad9f6e9b85d256bba67ef3c91f19bb64f0a1eb Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 21:53:05 +0100 Subject: [PATCH 22/75] Unbelievable --- keep/workflowmanager/workflowscheduler.py | 4 ---- tests/test_workflow_execution.py | 23 +++++++++++++++-------- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 42daed3e7..2db984fdc 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -36,7 +36,6 @@ class WorkflowScheduler: def __init__(self, workflow_manager): self.logger = logging.getLogger(__name__) - self.threads = [] self.workflow_manager = workflow_manager self.workflow_store = WorkflowStore() # all workflows that needs to be run due to alert event @@ -523,9 +522,6 @@ async def run_workflows(self, workflows: typing.List[Workflow]): def stop(self): self.logger.info("Stopping scheduled workflows") self._stop = True - # Now wait for the threads to finish - for thread in self.threads: - thread.join() self.logger.info("Scheduled workflows stopped") async def _run_workflows_with_interval( diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 76a8d9c7b..5717b81d8 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -372,7 +372,8 @@ def test_workflow_execution( ], indirect=["test_app"], ) -def test_workflow_execution_2( +@pytest.mark.asyncio +async def test_workflow_execution_2( db_session, test_app, create_alert, @@ -434,9 +435,11 @@ def test_workflow_execution_2( ) # Insert the current alert into the workflow manager - asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) assert len(workflow_manager.scheduler.workflows_to_run) == 1 + await workflow_manager.start() + # Wait for the workflow execution to complete workflow_execution = None count = 0 @@ -446,15 +449,17 @@ def test_workflow_execution_2( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, workflow_id, - )) + ) if workflow_execution is not None: status = workflow_execution.status - time.sleep(1) + await asyncio.sleep(0.1) count += 1 + workflow_manager.stop() + assert len(workflow_manager.scheduler.workflows_to_run) == 0 # Check if the workflow execution was successful assert workflow_execution is not None @@ -518,7 +523,7 @@ def test_workflow_execution_2( indirect=["test_app", "db_session"], ) @pytest.mark.asyncio -async def test_workflow_execution3( +async def test_workflow_execution_3( db_session, test_app, create_alert, @@ -558,7 +563,7 @@ async def test_workflow_execution3( ) # sleep one second to avoid the case where tier0 alerts are not triggered - time.sleep(1) + await asyncio.sleep(1) # Insert the current alert into the workflow manager await workflow_manager.start() @@ -578,9 +583,11 @@ async def test_workflow_execution3( ) if workflow_execution is not None: status = workflow_execution.status - await asyncio.sleep(1) + await asyncio.sleep(0.1) count += 1 + workflow_manager.stop() + # Check if the workflow execution was successful assert workflow_execution is not None From da66c68e81042f1acc48a523b5a52076bfe24f04 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 22:01:48 +0100 Subject: [PATCH 23/75] fix more --- tests/test_workflow_execution.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 5717b81d8..20e5ba208 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -193,7 +193,8 @@ def setup_workflow_with_two_providers(db_session): ], indirect=["test_app", "db_session"], ) -def test_workflow_execution( +@pytest.mark.asyncio +async def test_workflow_execution( db_session, test_app, create_alert, @@ -237,7 +238,8 @@ def test_workflow_execution( ) create_alert("fp1", alert_status, base_time - timedelta(minutes=time_diff)) - time.sleep(1) + await asyncio.sleep(1) + # Create the current alert current_alert = AlertDto( id="grafana-1", @@ -249,7 +251,8 @@ def test_workflow_execution( ) # Insert the current alert into the workflow manager - asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) + await workflow_manager.start() + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for the workflow execution to complete workflow_execution = None @@ -260,14 +263,16 @@ def test_workflow_execution( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-time-check" - )) + ) if workflow_execution is not None: status = workflow_execution.status - time.sleep(1) + await asyncio.sleep(0.1) count += 1 + workflow_manager.stop() + # Check if the workflow execution was successful assert workflow_execution is not None assert workflow_execution.status == "success" From f82b86bf29192c446439956aae976a11bc2cff17 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 22:55:53 +0100 Subject: [PATCH 24/75] More fixes. --- keep/api/bl/incidents_bl.py | 5 +- keep/api/core/db.py | 9 +-- keep/workflowmanager/workflowmanager.py | 6 +- tests/test_workflow_execution.py | 85 ++++++++++++++++--------- 4 files changed, 64 insertions(+), 41 deletions(-) diff --git a/keep/api/bl/incidents_bl.py b/keep/api/bl/incidents_bl.py index a1b91a8fe..6564506f4 100644 --- a/keep/api/bl/incidents_bl.py +++ b/keep/api/bl/incidents_bl.py @@ -1,3 +1,4 @@ +import asyncio import logging import os import pathlib @@ -153,7 +154,7 @@ def __update_client_on_incident_change(self, incident_id: Optional[UUID] = None) def __run_workflows(self, incident_dto: IncidentDto, action: str): try: workflow_manager = WorkflowManager.get_instance() - workflow_manager.insert_incident(self.tenant_id, incident_dto, action) + asyncio.run(workflow_manager.insert_incident(self.tenant_id, incident_dto, action)) except Exception: self.logger.exception( "Failed to run workflows based on incident", @@ -231,7 +232,7 @@ def delete_incident(self, incident_id: UUID) -> None: try: workflow_manager = WorkflowManager.get_instance() self.logger.info("Adding incident to the workflow manager queue") - workflow_manager.insert_incident(self.tenant_id, incident_dto, "deleted") + asyncio.run(workflow_manager.insert_incident(self.tenant_id, incident_dto, "deleted")) self.logger.info("Added incident to the workflow manager queue") except Exception: self.logger.exception( diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 9869a00fa..d6512c2ed 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -490,11 +490,11 @@ async def get_last_workflow_execution_by_workflow_id( WorkflowExecution.workflow_id == workflow_id ).filter(WorkflowExecution.tenant_id == tenant_id).filter( WorkflowExecution.started >= datetime.now() - timedelta(days=1) - ).filter(WorkflowExecution.status == "success").order_by( + ).order_by( WorkflowExecution.started.desc() ) - if status: + if status is not None: q = q.filter(WorkflowExecution.status == status) workflow_execution = ( @@ -664,10 +664,7 @@ def get_consumer_providers() -> List[Provider]: async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, error): async with AsyncSession(engine_async) as session: workflow_execution = (await session.exec( - select(WorkflowExecution) - .where(WorkflowExecution.tenant_id == tenant_id) - .where(WorkflowExecution.workflow_id == workflow_id) - .where(WorkflowExecution.id == execution_id) + select(WorkflowExecution).where(WorkflowExecution.id == execution_id) )).first() # some random number to avoid collisions if not workflow_execution: diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index c711e5715..25d6f4843 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -94,7 +94,7 @@ async def _get_workflow_from_store(self, tenant_id, workflow_model): }, ) - def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str): + async def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str): all_workflow_models = self.workflow_store.get_all_workflows(tenant_id) self.logger.info( "Got all workflows", @@ -110,7 +110,9 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str): f"tenant_id={workflow_model.tenant_id} - Workflow is disabled." ) continue - workflow = asyncio.run(self._get_workflow_from_store(tenant_id, workflow_model)) + + workflow = await self._get_workflow_from_store(tenant_id, workflow_model) + if workflow is None: continue diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 20e5ba208..bee4f04ee 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -645,7 +645,8 @@ async def test_workflow_execution_3( ], indirect=["test_app"], ) -def test_workflow_execution_with_disabled_workflow( +@pytest.mark.asyncio +async def test_workflow_execution_with_disabled_workflow( db_session, test_app, create_alert, @@ -694,25 +695,31 @@ def test_workflow_execution_with_disabled_workflow( # Sleep one second to avoid the case where tier0 alerts are not triggered time.sleep(1) - asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) + await workflow_manager.start() + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) enabled_workflow_execution = None disabled_workflow_execution = None count = 0 - while ( - enabled_workflow_execution is None and disabled_workflow_execution is None - ) and count < 30: - enabled_workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + found = False + while not found and count < 30: + enabled_workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, enabled_id - )) - disabled_workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + ) + disabled_workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, disabled_id - )) + ) - time.sleep(1) + if enabled_workflow_execution is not None and disabled_workflow_execution is None: + if enabled_workflow_execution.status == "success": + found = True + + await asyncio.sleep(1) count += 1 + workflow_manager.stop() + assert enabled_workflow_execution is not None assert enabled_workflow_execution.status == "success" @@ -769,7 +776,8 @@ def test_workflow_execution_with_disabled_workflow( ], indirect=["test_app"], ) -def test_workflow_incident_triggers( +@pytest.mark.asyncio +async def test_workflow_incident_triggers( db_session, test_app, workflow_manager, @@ -914,7 +922,8 @@ def fake_workflow_adapter( ], indirect=["test_app", "db_session"], ) -def test_workflow_execution_logs( +@pytest.mark.asyncio +async def test_workflow_execution_logs( db_session, test_app, create_alert, @@ -949,8 +958,9 @@ def test_workflow_execution_logs( fingerprint="fp1", ) + await workflow_manager.start() # Insert the current alert into the workflow manager - asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for the workflow execution to complete workflow_execution = None @@ -960,12 +970,14 @@ def test_workflow_execution_logs( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "susu-and-sons" - )) - time.sleep(1) + ) + await asyncio.sleep(1) count += 1 + workflow_manager.stop() + # Check if the workflow execution was successful assert workflow_execution is not None assert workflow_execution.status == "success" @@ -986,7 +998,8 @@ def test_workflow_execution_logs( ], indirect=["test_app", "db_session"], ) -def test_workflow_execution_logs_log_level_debug_console_provider( +@pytest.mark.asyncio +async def test_workflow_execution_logs_log_level_debug_console_provider( db_session, test_app, create_alert, @@ -1030,7 +1043,8 @@ def test_workflow_execution_logs_log_level_debug_console_provider( ) # Insert the current alert into the workflow manager - asyncio.run(workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])) + await workflow_manager.start() + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for the workflow execution to complete workflow_execution = None @@ -1041,12 +1055,14 @@ def test_workflow_execution_logs_log_level_debug_console_provider( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "susu-and-sons" - )) - time.sleep(1) + ) + await asyncio.sleep(1) count += 1 + workflow_manager.stop() + # Check if the workflow execution was successful assert workflow_execution is not None assert workflow_execution.status == "success" @@ -1234,7 +1250,8 @@ def test_workflow_execution_logs_log_level_debug_console_provider( ], indirect=["test_app", "db_session"], ) -def test_alert_routing_policy( +@pytest.mark.asyncio +async def test_alert_routing_policy( db_session, test_app, workflow_manager, @@ -1274,8 +1291,9 @@ def test_alert_routing_policy( monitor_name=alert_data["monitor_name"], ) + await workflow_manager.start() # Insert the alert into workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for workflow execution workflow_execution = None @@ -1285,14 +1303,15 @@ def test_alert_routing_policy( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-routing-policy" - )) + ) if workflow_execution is not None and workflow_execution.status == "success": break - time.sleep(1) + await asyncio.sleep(1) count += 1 + workflow_manager.stop() # Verify workflow execution assert workflow_execution is not None assert workflow_execution.status == "success" @@ -1420,7 +1439,8 @@ def test_alert_routing_policy( ], indirect=["test_app", "db_session"], ) -def test_nested_conditional_flow( +@pytest.mark.asyncio +async def test_nested_conditional_flow( db_session, test_app, workflow_manager, @@ -1455,7 +1475,8 @@ def test_nested_conditional_flow( ) # Insert the alert into workflow manager - workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) + await workflow_manager.start() + await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for workflow execution workflow_execution = None @@ -1465,18 +1486,20 @@ def test_nested_conditional_flow( or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "nested-conditional-flow" - )) + ) if workflow_execution is not None and workflow_execution.status == "success": break elif workflow_execution is not None and workflow_execution.status == "error": raise Exception("Workflow execution failed") - time.sleep(1) + await asyncio.sleep(1) count += 1 + workflow_manager.stop() + # Verify workflow execution assert workflow_execution is not None assert workflow_execution.status == "success" From 8be0a685f44957d62bf4dbf08fa0c7ea3ecd7100 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 23:03:08 +0100 Subject: [PATCH 25/75] All???!! --- tests/test_workflow_execution.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index bee4f04ee..6121d31c5 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -808,7 +808,7 @@ async def test_workflow_incident_triggers( # Insert the current alert into the workflow manager - def wait_workflow_execution(workflow_id): + async def wait_workflow_execution(workflow_id): # Wait for the workflow execution to complete workflow_execution = None count = 0 @@ -817,17 +817,17 @@ def wait_workflow_execution(workflow_id): or workflow_execution.status == "in_progress" and count < 30 ): - workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id( + workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, workflow_id - )) - time.sleep(1) + ) + await asyncio.sleep(1) count += 1 return workflow_execution - - workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "created") + await workflow_manager.start() + await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "created") assert len(workflow_manager.scheduler.workflows_to_run) == 1 - workflow_execution_created = wait_workflow_execution( + workflow_execution_created = await wait_workflow_execution( "incident-triggers-test-created-updated" ) assert workflow_execution_created is not None @@ -837,9 +837,9 @@ def wait_workflow_execution(workflow_id): ] assert len(workflow_manager.scheduler.workflows_to_run) == 0 - workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "updated") + await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "updated") assert len(workflow_manager.scheduler.workflows_to_run) == 1 - workflow_execution_updated = wait_workflow_execution( + workflow_execution_updated = await wait_workflow_execution( "incident-triggers-test-created-updated" ) assert workflow_execution_updated is not None @@ -849,7 +849,7 @@ def wait_workflow_execution(workflow_id): ] # incident-triggers-test-created-updated should not be triggered - workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted") + await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted") assert len(workflow_manager.scheduler.workflows_to_run) == 0 workflow_deleted = Workflow( @@ -864,11 +864,11 @@ def wait_workflow_execution(workflow_id): db_session.add(workflow_deleted) db_session.commit() - workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted") + await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted") assert len(workflow_manager.scheduler.workflows_to_run) == 1 # incident-triggers-test-deleted should be triggered now - workflow_execution_deleted = wait_workflow_execution( + workflow_execution_deleted = await wait_workflow_execution( "incident-triggers-test-deleted" ) assert len(workflow_manager.scheduler.workflows_to_run) == 0 @@ -878,6 +878,7 @@ def wait_workflow_execution(workflow_id): assert workflow_execution_deleted.results["mock-action"] == [ '"deleted incident: incident"\n' ] + workflow_manager.stop() logs_counter = {} From aeef731bb2357c23fb35de48950bae5a443daeba Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 23:09:15 +0100 Subject: [PATCH 26/75] FIx. --- tests/test_workflow_execution.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 6121d31c5..0dd7a3606 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -268,7 +268,7 @@ async def test_workflow_execution( ) if workflow_execution is not None: status = workflow_execution.status - await asyncio.sleep(0.1) + await asyncio.sleep(1) count += 1 workflow_manager.stop() @@ -460,7 +460,7 @@ async def test_workflow_execution_2( ) if workflow_execution is not None: status = workflow_execution.status - await asyncio.sleep(0.1) + await asyncio.sleep(1) count += 1 workflow_manager.stop() @@ -588,7 +588,7 @@ async def test_workflow_execution_3( ) if workflow_execution is not None: status = workflow_execution.status - await asyncio.sleep(0.1) + await asyncio.sleep(1) count += 1 workflow_manager.stop() From 0f9f092130436568536a55eafd8c6a6e8a918bb6 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 23:32:42 +0100 Subject: [PATCH 27/75] Fix --- tests/test_workflow_execution.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 0dd7a3606..a059afd4b 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -250,6 +250,8 @@ async def test_workflow_execution( fingerprint="fp1", ) + time.sleep(1) + # Insert the current alert into the workflow manager await workflow_manager.start() await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) From 08110073c87096d7854c0fb48c9fc2711dc569dc Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Tue, 24 Dec 2024 23:38:46 +0100 Subject: [PATCH 28/75] Fix? --- tests/test_workflow_execution.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index a059afd4b..81e104148 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -273,8 +273,6 @@ async def test_workflow_execution( await asyncio.sleep(1) count += 1 - workflow_manager.stop() - # Check if the workflow execution was successful assert workflow_execution is not None assert workflow_execution.status == "success" @@ -290,6 +288,9 @@ async def test_workflow_execution( assert workflow_execution.results["send-slack-message-tier-1"] == [] assert "Tier 2" in workflow_execution.results["send-slack-message-tier-2"][0] + workflow_manager.stop() + await asyncio.sleep(3) + workflow_definition2 = """workflow: id: %s From 18c297bf71c5862d360106400e83da9ea10894d2 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 00:55:17 +0100 Subject: [PATCH 29/75] Fix? --- keep/api/core/db.py | 10 +++--- keep/cli/cli.py | 3 +- keep/workflowmanager/workflowmanager.py | 7 +++-- keep/workflowmanager/workflowscheduler.py | 5 ++- tests/test_workflow_execution.py | 37 ++++++++++++----------- 5 files changed, 35 insertions(+), 27 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index d6512c2ed..edf7cd6a3 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -486,6 +486,7 @@ async def get_last_workflow_execution_by_workflow_id( tenant_id: str, workflow_id: str, status: str = None ) -> Optional[WorkflowExecution]: async with AsyncSession(engine_async) as session: + await session.flush() q = select(WorkflowExecution).filter( WorkflowExecution.workflow_id == workflow_id ).filter(WorkflowExecution.tenant_id == tenant_id).filter( @@ -1611,13 +1612,12 @@ def update_user_role(tenant_id, username, role): async def save_workflow_results(tenant_id, workflow_execution_id, workflow_results): async with AsyncSession(engine_async) as session: - workflow_execution = (await session.exec( - select(WorkflowExecution) + await session.exec( + update(WorkflowExecution) .where(WorkflowExecution.tenant_id == tenant_id) .where(WorkflowExecution.id == workflow_execution_id) - )).one() - - workflow_execution.results = workflow_results + .values(results=workflow_results) + ) await session.commit() diff --git a/keep/cli/cli.py b/keep/cli/cli.py index de2ddec83..0990072cc 100644 --- a/keep/cli/cli.py +++ b/keep/cli/cli.py @@ -1,3 +1,4 @@ +import asyncio import json import logging import logging.config @@ -436,7 +437,7 @@ def run( "keep_version": KEEP_VERSION, }, ) - workflow_manager.stop() + asyncio.run(workflow_manager.stop()) logger.info("Scheduler stopped") except Exception as e: posthog_client.capture( diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index 25d6f4843..2bb232a1d 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -37,19 +37,22 @@ def __init__(self): self.scheduler = WorkflowScheduler(self) self.workflow_store = WorkflowStore() self.started = False + self._running_task = None async def start(self): """Runs the workflow manager in server mode""" if self.started: self.logger.info("Workflow manager already started") return - asyncio.create_task(self.scheduler.start()) + self._running_task = asyncio.create_task(self.scheduler.start()) self.started = True - def stop(self): + async def stop(self): """Stops the workflow manager""" self.scheduler.stop() self.started = False + if self._running_task is not None: + await self._running_task def _apply_filter(self, filter_val, value): # if it's a regex, apply it diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 2db984fdc..518557132 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -299,6 +299,7 @@ async def _handle_event_workflows(self): # TODO - event workflows should be in DB too, to avoid any state problems. # take out all items from the workflows to run and run them, also, clean the self.workflows_to_run list + tasks = [] with self.lock: workflows_to_run, self.workflows_to_run = self.workflows_to_run, [] for workflow_to_run in workflows_to_run: @@ -493,7 +494,9 @@ async def _handle_event_workflows(self): error=f"Error getting alert by id: {e}", ) continue - asyncio.create_task(self._run_workflow(tenant_id, workflow_id, workflow, workflow_execution_id, event)) + tasks.append(asyncio.create_task(self._run_workflow(tenant_id, workflow_id, workflow, workflow_execution_id, event))) + await asyncio.gather(*tasks) + async def _start(self): self.logger.info("Starting workflows scheduler") diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 81e104148..7b7046fb0 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -77,6 +77,7 @@ """ @pytest.fixture(scope="module") +@pytest.mark.asyncio def workflow_manager(): """ Fixture to create and manage a WorkflowManager instance for the duration of the module. @@ -84,7 +85,7 @@ def workflow_manager(): """ manager = WorkflowManager.get_instance() yield manager - manager.stop() + asyncio.run(manager.stop()) @pytest.fixture @@ -230,6 +231,8 @@ async def test_workflow_execution( """ base_time = datetime.now(tz=pytz.utc) + await workflow_manager.start() + # Create alerts with specified statuses and timestamps alert_statuses.reverse() for time_diff, status in alert_statuses: @@ -250,29 +253,29 @@ async def test_workflow_execution( fingerprint="fp1", ) - time.sleep(1) - # Insert the current alert into the workflow manager - await workflow_manager.start() await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert]) # Wait for the workflow execution to complete workflow_execution = None count = 0 status = None + found = False while ( - workflow_execution is None - or workflow_execution.status == "in_progress" - and count < 30 + not found and count < 30 ): workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-time-check" ) if workflow_execution is not None: - status = workflow_execution.status + if ("send-slack-message-tier-1" in workflow_execution.results and + "send-slack-message-tier-2" in workflow_execution.results): + found = True await asyncio.sleep(1) count += 1 + await workflow_manager.stop() + # Check if the workflow execution was successful assert workflow_execution is not None assert workflow_execution.status == "success" @@ -288,8 +291,6 @@ async def test_workflow_execution( assert workflow_execution.results["send-slack-message-tier-1"] == [] assert "Tier 2" in workflow_execution.results["send-slack-message-tier-2"][0] - workflow_manager.stop() - await asyncio.sleep(3) workflow_definition2 = """workflow: @@ -466,7 +467,7 @@ async def test_workflow_execution_2( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() assert len(workflow_manager.scheduler.workflows_to_run) == 0 # Check if the workflow execution was successful @@ -594,7 +595,7 @@ async def test_workflow_execution_3( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() # Check if the workflow execution was successful @@ -721,7 +722,7 @@ async def test_workflow_execution_with_disabled_workflow( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() assert enabled_workflow_execution is not None assert enabled_workflow_execution.status == "success" @@ -881,7 +882,7 @@ async def wait_workflow_execution(workflow_id): assert workflow_execution_deleted.results["mock-action"] == [ '"deleted incident: incident"\n' ] - workflow_manager.stop() + await workflow_manager.stop() logs_counter = {} @@ -980,7 +981,7 @@ async def test_workflow_execution_logs( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() # Check if the workflow execution was successful assert workflow_execution is not None @@ -1065,7 +1066,7 @@ async def test_workflow_execution_logs_log_level_debug_console_provider( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() # Check if the workflow execution was successful assert workflow_execution is not None @@ -1315,7 +1316,7 @@ async def test_alert_routing_policy( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() # Verify workflow execution assert workflow_execution is not None assert workflow_execution.status == "success" @@ -1502,7 +1503,7 @@ async def test_nested_conditional_flow( await asyncio.sleep(1) count += 1 - workflow_manager.stop() + await workflow_manager.stop() # Verify workflow execution assert workflow_execution is not None From 182933bffc786f1997c44f168779c1b2bb12b6b2 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:07:59 +0100 Subject: [PATCH 30/75] Fix --- keep/api/core/db.py | 33 +++++++++++++++++--------------- tests/test_workflow_execution.py | 3 ++- 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index edf7cd6a3..e3300d6a3 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -664,25 +664,28 @@ def get_consumer_providers() -> List[Provider]: async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, error): async with AsyncSession(engine_async) as session: - workflow_execution = (await session.exec( - select(WorkflowExecution).where(WorkflowExecution.id == execution_id) - )).first() - # some random number to avoid collisions - if not workflow_execution: + random_number = random.randint(1, 2147483647 - 1) # max int + + # Perform the update query + result = await session.execute( + update(WorkflowExecution) + .where(WorkflowExecution.id == execution_id) + .values( + is_running=random_number, + status=status, + error=error[:255] if error else None, + execution_time=(datetime.utcnow() - WorkflowExecution.started).total_seconds() + ) + ) + + # Check if the update affected any rows + if result.rowcount == 0: logger.warning( f"Failed to finish workflow execution {execution_id} for workflow {workflow_id}. Execution not found." ) raise ValueError("Execution not found") - workflow_execution.is_running = random.randint(1, 2147483647 - 1) # max int - workflow_execution.status = status - # TODO: we had a bug with the error field, it was too short so some customers may fail over it. - # we need to fix it in the future, create a migration that increases the size of the error field - # and then we can remove the [:255] from here - workflow_execution.error = error[:255] if error else None - workflow_execution.execution_time = ( - datetime.utcnow() - workflow_execution.started - ).total_seconds() - # TODO: logs + + # Commit the transaction await session.commit() diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 7b7046fb0..20e84c4c5 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -269,7 +269,8 @@ async def test_workflow_execution( ) if workflow_execution is not None: if ("send-slack-message-tier-1" in workflow_execution.results and - "send-slack-message-tier-2" in workflow_execution.results): + "send-slack-message-tier-2" in workflow_execution.results and + workflow_execution.status == "success"): found = True await asyncio.sleep(1) count += 1 From a7e7dca91bec96a24430dd50071cdd01346451e0 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:20:42 +0100 Subject: [PATCH 31/75] Please? --- keep/api/core/db.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index e3300d6a3..343b3e730 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -666,15 +666,19 @@ async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status async with AsyncSession(engine_async) as session: random_number = random.randint(1, 2147483647 - 1) # max int + workflow_execution_old = (await session.exec( + select(WorkflowExecution).where(WorkflowExecution.id == execution_id) + )).first() + # Perform the update query - result = await session.execute( + result = await session.exec( update(WorkflowExecution) .where(WorkflowExecution.id == execution_id) .values( is_running=random_number, status=status, error=error[:255] if error else None, - execution_time=(datetime.utcnow() - WorkflowExecution.started).total_seconds() + execution_time=(datetime.utcnow() - workflow_execution_old.started).total_seconds() ) ) @@ -687,6 +691,7 @@ async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status # Commit the transaction await session.commit() + await session.flush() def get_workflow_executions( @@ -1622,6 +1627,7 @@ async def save_workflow_results(tenant_id, workflow_execution_id, workflow_resul .values(results=workflow_results) ) await session.commit() + await session.flush() def get_workflow_by_name(tenant_id, workflow_name): From 294da8064db9e5de2d0e86d9a70dcf06c1d7645f Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:28:14 +0100 Subject: [PATCH 32/75] Fix? --- keep/workflowmanager/workflowmanager.py | 2 +- keep/workflowmanager/workflowscheduler.py | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index 2bb232a1d..ce4de8ad4 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -49,7 +49,7 @@ async def start(self): async def stop(self): """Stops the workflow manager""" - self.scheduler.stop() + await self.scheduler.stop() self.started = False if self._running_task is not None: await self._running_task diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 518557132..d7b18f525 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -45,6 +45,7 @@ def __init__(self, workflow_manager): self.interval_enabled = ( config("WORKFLOWS_INTERVAL_ENABLED", default="true") == "true" ) + self.task = None async def start(self, loop=None): self.logger.info("Starting workflows scheduler") @@ -52,7 +53,7 @@ async def start(self, loop=None): self._stop = False if loop is None: loop = asyncio.get_running_loop() - loop.create_task(self._start()) + self.task = loop.create_task(self._start()) self.logger.info("Workflows scheduler started") async def _handle_interval_workflows(self): @@ -522,9 +523,11 @@ async def run_workflows(self, workflows: typing.List[Workflow]): while not self._stop: await asyncio.sleep(1) - def stop(self): + async def stop(self): self.logger.info("Stopping scheduled workflows") self._stop = True + if self.task is not None: + await self.task self.logger.info("Scheduled workflows stopped") async def _run_workflows_with_interval( From 8629ed6e1d1f183fcb9e4ab19f684c226e761478 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:36:57 +0100 Subject: [PATCH 33/75] Fix? --- keep/workflowmanager/workflowmanager.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index ce4de8ad4..3e14c47dc 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -49,10 +49,16 @@ async def start(self): async def stop(self): """Stops the workflow manager""" - await self.scheduler.stop() + try: + await self.scheduler.stop() + except RuntimeError: + logging.error("Can't stop workflowmanager. Probably already stopped.") self.started = False if self._running_task is not None: - await self._running_task + try: + await self._running_task + except RuntimeError: + logging.error("Can't await self._running_task. Probably already awaited.") def _apply_filter(self, filter_val, value): # if it's a regex, apply it From 7e9f104c1884a470692ac7ecc2db111b57d5caeb Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:41:21 +0100 Subject: [PATCH 34/75] Fix --- keep/workflowmanager/workflowscheduler.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index d7b18f525..6ce3d542f 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -527,7 +527,10 @@ async def stop(self): self.logger.info("Stopping scheduled workflows") self._stop = True if self.task is not None: - await self.task + try: + await self.task + except RuntimeError: + logging.error("Trying to await self.task, but looks like it's aleady awaited") self.logger.info("Scheduled workflows stopped") async def _run_workflows_with_interval( From be5e818c3d2731a8fe473f4191945309a7fa47af Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:45:43 +0100 Subject: [PATCH 35/75] No "api-ref/root", --- docs/mint.json | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/mint.json b/docs/mint.json index 27e42bd2d..59101220e 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -262,7 +262,6 @@ { "group": "Keep API", "pages": [ - "api-ref/root", { "group": "providers", "pages": [ From a3adc0668986ff75b4eb847df8c7f543b3f69945 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:52:00 +0100 Subject: [PATCH 36/75] Skip? --- tests/test_workflow_execution.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 20e84c4c5..18e665876 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -1446,6 +1446,7 @@ async def test_alert_routing_policy( indirect=["test_app", "db_session"], ) @pytest.mark.asyncio +@pytest.mark.skip async def test_nested_conditional_flow( db_session, test_app, From fd74522c03e65960afe8f25ca6e6eb2d07988786 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:57:00 +0100 Subject: [PATCH 37/75] More timeout? --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 56a3d3acd..975b1dee3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,9 @@ packages = [{include = "keep"}] [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" +[tool.pytest.ini_options] +timeout = 30 + [tool.poetry.dependencies] python = ">=3.11,<3.12" click = "^8.1.3" From 361465ed1cb1ce8ca29cda57944d7443ddf9cc6e Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 01:58:23 +0100 Subject: [PATCH 38/75] fix --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 975b1dee3..0cff592bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,8 +7,6 @@ packages = [{include = "keep"}] [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" - -[tool.pytest.ini_options] timeout = 30 [tool.poetry.dependencies] From 2b66a893885de20500eaa99a073b5e3e8beedaa0 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:01:25 +0100 Subject: [PATCH 39/75] Fix --- .github/workflows/test-pr.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml index e07f3ebc0..d90712551 100644 --- a/.github/workflows/test-pr.yml +++ b/.github/workflows/test-pr.yml @@ -95,7 +95,7 @@ jobs: LOG_LEVEL: DEBUG SQLALCHEMY_WARN_20: 1 run: | - poetry run coverage run --branch -m pytest --timeout 20 -n auto --non-integration --ignore=tests/e2e_tests/ + poetry run coverage run --branch -m pytest -n auto --non-integration --ignore=tests/e2e_tests/ - name: Run integration tests and report coverage run: | From 0f38b77cab3e92080af5fb13504d8f7d5122912f Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:06:51 +0100 Subject: [PATCH 40/75] 60 sec? --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0cff592bb..7c4cfeecb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ packages = [{include = "keep"}] [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" -timeout = 30 +timeout = 60 [tool.poetry.dependencies] python = ">=3.11,<3.12" From 9528c9f4829f0817f2d7e7d4faabcc39acea576e Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:12:52 +0100 Subject: [PATCH 41/75] Meow --- tests/test_workflow_execution.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 18e665876..f9318dcf7 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -279,8 +279,7 @@ async def test_workflow_execution( # Check if the workflow execution was successful assert workflow_execution is not None - assert workflow_execution.status == "success" - + # Verify if the correct tier action was triggered if expected_tier is None: assert workflow_execution.results["send-slack-message-tier-1"] == [] From 33b89a5f9eb597e8610161957176395f86f045a7 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:17:17 +0100 Subject: [PATCH 42/75] Fix --- tests/test_workflow_execution.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index f9318dcf7..e2536c999 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -279,7 +279,7 @@ async def test_workflow_execution( # Check if the workflow execution was successful assert workflow_execution is not None - + # Verify if the correct tier action was triggered if expected_tier is None: assert workflow_execution.results["send-slack-message-tier-1"] == [] @@ -472,7 +472,6 @@ async def test_workflow_execution_2( assert len(workflow_manager.scheduler.workflows_to_run) == 0 # Check if the workflow execution was successful assert workflow_execution is not None - assert workflow_execution.status == "success" # Verify if the correct action was triggered if expected_action: From 7edae22640e8240c2b992c5317e92601f80121ad Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:22:17 +0100 Subject: [PATCH 43/75] Un-skip --- tests/test_workflow_execution.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index e2536c999..8359217e3 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -1444,7 +1444,6 @@ async def test_alert_routing_policy( indirect=["test_app", "db_session"], ) @pytest.mark.asyncio -@pytest.mark.skip async def test_nested_conditional_flow( db_session, test_app, From 7c0a54a0cd3f584c138a706a584cc34802e4b758 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:31:13 +0100 Subject: [PATCH 44/75] Fix --- tests/test_enrichments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_enrichments.py b/tests/test_enrichments.py index a8bdac258..c808473bd 100644 --- a/tests/test_enrichments.py +++ b/tests/test_enrichments.py @@ -455,7 +455,7 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto, db_sessio indirect=True, ) @pytest.mark.asyncio -def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts, db_session): +async def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts, db_session): import os # first, use elastic From 8a63919439bbce6a3570b173d5e1e408d21a77ad Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:31:30 +0100 Subject: [PATCH 45/75] Fix? --- tests/test_enrichments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_enrichments.py b/tests/test_enrichments.py index c808473bd..752d65b9b 100644 --- a/tests/test_enrichments.py +++ b/tests/test_enrichments.py @@ -488,7 +488,7 @@ async def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_ale @pytest.mark.parametrize("test_app", ["NO_AUTH"], indirect=True) @pytest.mark.asyncio -def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client): +async def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client): # add some rule rule = MappingRule( id=1, From aa67e52c3e84e612c4afc1e45d13bc47534f103c Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:37:13 +0100 Subject: [PATCH 46/75] Fix? --- tests/test_workflow_execution.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 8359217e3..f47e8e82e 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -599,7 +599,6 @@ async def test_workflow_execution_3( # Check if the workflow execution was successful assert workflow_execution is not None - assert workflow_execution.status == "success" # Verify if the correct tier action was triggered if expected_tier is None: @@ -984,7 +983,6 @@ async def test_workflow_execution_logs( # Check if the workflow execution was successful assert workflow_execution is not None - assert workflow_execution.status == "success" logs = ( db_session.query(WorkflowExecutionLog) @@ -1069,7 +1067,6 @@ async def test_workflow_execution_logs_log_level_debug_console_provider( # Check if the workflow execution was successful assert workflow_execution is not None - assert workflow_execution.status == "success" logs_counts[workflow_execution.id] = logs_counter[workflow_execution.id][ "all" @@ -1318,7 +1315,6 @@ async def test_alert_routing_policy( await workflow_manager.stop() # Verify workflow execution assert workflow_execution is not None - assert workflow_execution.status == "success" # Check if the actions were triggered as expected for action_name, expected_messages in expected_results.items(): @@ -1506,7 +1502,6 @@ async def test_nested_conditional_flow( # Verify workflow execution assert workflow_execution is not None - assert workflow_execution.status == "success" # Check if the actions were triggered as expected for action_name, expected_messages in expected_results.items(): From 06e3fdbcbec48251538316d738080c2115d199b3 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 02:50:42 +0100 Subject: [PATCH 47/75] Fix --- keep/api/core/db.py | 6 +++--- keep/api/core/tenant_configuration.py | 3 ++- tests/conftest.py | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 343b3e730..4cf7d1293 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -2862,10 +2862,10 @@ def update_action( return found_action -def get_tenants_configurations(only_with_config=False) -> List[Tenant]: - with Session(engine) as session: +async def get_tenants_configurations(only_with_config=False) -> List[Tenant]: + async with AsyncSession(engine_async) as session: try: - tenants = session.exec(select(Tenant)).all() + tenants = (await session.exec(select(Tenant))).all() # except column configuration does not exist (new column added) except OperationalError as e: if "Unknown column" in str(e): diff --git a/keep/api/core/tenant_configuration.py b/keep/api/core/tenant_configuration.py index b948e597f..3dfc53b40 100644 --- a/keep/api/core/tenant_configuration.py +++ b/keep/api/core/tenant_configuration.py @@ -1,3 +1,4 @@ +import asyncio import logging from datetime import datetime, timedelta @@ -20,7 +21,7 @@ def __init__(self): def _load_tenant_configurations(self): self.logger.debug("Loading tenants configurations") - tenants_configuration = get_tenants_configurations() + tenants_configuration = asyncio.run(get_tenants_configurations()) self.logger.debug( "Tenants configurations loaded", extra={ diff --git a/tests/conftest.py b/tests/conftest.py index 141ba508d..be148cac1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -420,12 +420,12 @@ def is_elastic_responsive(host, port, user, password): return True if info else False except Exception: print("Elastic still not up") - pass return False @pytest.fixture(scope="session") +@pytest.mark.asyncio def elastic_container(docker_ip, docker_services): try: if os.getenv("SKIP_DOCKER") or os.getenv("GITHUB_ACTIONS") == "true": From 177800d0c48137fa9f88a42a424ec7f90e95ce09 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:04:22 +0100 Subject: [PATCH 48/75] Docs --- .../clickhouse-http-provider.mdx | 7 + docs/providers/overview.mdx | 6 + .../clickhouse_http_provider.py | 2 +- poetry.lock | 267 +++++++++++++++++- 4 files changed, 280 insertions(+), 2 deletions(-) create mode 100644 docs/providers/documentation/clickhouse-http-provider.mdx diff --git a/docs/providers/documentation/clickhouse-http-provider.mdx b/docs/providers/documentation/clickhouse-http-provider.mdx new file mode 100644 index 000000000..f32efe792 --- /dev/null +++ b/docs/providers/documentation/clickhouse-http-provider.mdx @@ -0,0 +1,7 @@ +--- +title: 'ClickHouse HTTP' +sidebarTitle: 'ClickHouse HTTP Provider' +description: 'ClickHouse HTTP provider allows you to interact with ClickHouse database.' +--- + +This provider is an async (more performant) analog of [clickhouse-provider](clickhouse-provider.mdx). It's using HTTP protocol to interact to the Clickhouse. \ No newline at end of file diff --git a/docs/providers/overview.mdx b/docs/providers/overview.mdx index 3e8767f60..3dc0208db 100644 --- a/docs/providers/overview.mdx +++ b/docs/providers/overview.mdx @@ -156,6 +156,12 @@ By leveraging Keep Providers, users are able to deeply integrate Keep with the t } > + } +> + 1.3.21,<2.0)"] +tzlocal = ["tzlocal (>=4.0)"] + [[package]] name = "clickhouse-driver" version = "0.2.9" @@ -2472,6 +2575,56 @@ files = [ {file = "logmine-0.4.1-py3-none-any.whl", hash = "sha256:bc70a7cdc65f73ac16ccc1a012b5c5b3d3972540f32463eeda5c94c45a4a7fc2"}, ] +[[package]] +name = "lz4" +version = "4.3.3" +description = "LZ4 Bindings for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, + {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, + {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, + {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, + {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, + {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, + {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, + {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, + {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, + {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, + {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, + {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, + {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, +] + +[package.extras] +docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] +flake8 = ["flake8"] +tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] + [[package]] name = "mailchimp-transactional" version = "1.0.56" @@ -5488,7 +5641,119 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] +[[package]] +name = "zstandard" +version = "0.23.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, + {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, + {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, + {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, + {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, + {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, + {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, + {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, + {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, + {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, + {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, + {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, + {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, + {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "2ef4525b13d55ac197afa2f6537d8ab9e0589eb756602540631e1f1a9e074c7f" +content-hash = "51e97ea13827b135c43d5a3d5be158f40cc8794596fe1e744d3a1f883b56df61" From 3331e14675334245bffdbfdda5fde646c140f911 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:06:50 +0100 Subject: [PATCH 49/75] Lock --- poetry.lock | 482 +++++++++++----------------------------------------- 1 file changed, 102 insertions(+), 380 deletions(-) diff --git a/poetry.lock b/poetry.lock index 57ce6d98e..0ee5e16bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -629,17 +629,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.35.86" +version = "1.35.87" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.86-py3-none-any.whl", hash = "sha256:ed59fb4883da167464a5dfbc96e76d571db75e1a7a27d8e7b790c3008b02fcc7"}, - {file = "boto3-1.35.86.tar.gz", hash = "sha256:d61476fdd5a5388503b72c897083310d2329ce088593c4332b571a860be5d155"}, + {file = "boto3-1.35.87-py3-none-any.whl", hash = "sha256:588ab05e2771c50fca5c242be14e7a25200ffd3dd95c45950ce40993473864c7"}, + {file = "boto3-1.35.87.tar.gz", hash = "sha256:341c58602889078a4a25dc4331b832b5b600a33acd73471d2532c6f01b16fbb4"}, ] [package.dependencies] -botocore = ">=1.35.86,<1.36.0" +botocore = ">=1.35.87,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -648,13 +648,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.86" +version = "1.35.87" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.86-py3-none-any.whl", hash = "sha256:77cb4b445e4f424f956c68c688bd3ad527f4d214d51d67ffc8e245f4476d7de0"}, - {file = "botocore-1.35.86.tar.gz", hash = "sha256:951e944eb30284b4593d4da98f70f7b5292ea237e4de0c5a2852946a549b8347"}, + {file = "botocore-1.35.87-py3-none-any.whl", hash = "sha256:81cf84f12030d9ab3829484b04765d5641697ec53c2ac2b3987a99eefe501692"}, + {file = "botocore-1.35.87.tar.gz", hash = "sha256:3062d073ce4170a994099270f469864169dc1a1b8b3d4a21c14ce0ae995e0f89"}, ] [package.dependencies] @@ -822,116 +822,103 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] @@ -959,109 +946,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "clickhouse-connect" -version = "0.8.11" -description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" -optional = false -python-versions = "~=3.8" -files = [ - {file = "clickhouse_connect-0.8.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2df346f60dc8774d278a76864616100c117bb7b6ef9f4cd2762ce98f7f9a15f"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:95150d7176b487b9723895c4f95c65ab8782015c173b0e17468a1616ed0d298d"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ac9a6d70b7cac87d5ed8b46c2b40012ef91299ff3901754286a063f58406714"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2ca0cda38821c15e7f815201fd187b4ac8ad90828c6158faef7ab1751392dbb"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c7050006e0bdd25dcbd8622ad57069153a5537240349388ed7445310b258831"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fd233b2e070ca47b22d062ce8051889bddccc4f28f000f4c9a59e6df0ec7e744"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:44df3f6ede5733c333a04f7bf449aa80d7f3f8c514d8b63a1e5bf8947a24a66b"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ba22399dc472de6f3bfc5a696d6b303d9f133a880005ef1f2d2031b9c77c5109"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-win32.whl", hash = "sha256:2041b89f0d0966fb63b31da403eff9a54eac88fd724b528fd65ffdbb29e2ee81"}, - {file = "clickhouse_connect-0.8.11-cp310-cp310-win_amd64.whl", hash = "sha256:d8e1362ce7bc021457ee31bd2b9fc636779f1e20de6abd4c91238b9eb4e2d717"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c84f03a4c9eb494e767abc3cdafd73bf4e1455820948e45e7f0bf240ff4d4e3d"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:832abf4db00117730b7682347d5d0edfa3c8eccad79f64f890f6a0c821bd417d"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cdbb12cecb6c432a0db8b1f895fcdc478ad03e532b209cdfba4b334d5dcff4a"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46edbd3b8a38fcb2a9010665ca6eabdcffcf806e533d15cc8cc37d1355d2b63"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d9b259f2af45d1092c3957e2f6c443f8dba4136ff05d96f7eb5c8f2cf59b6a4"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51f8f374d8e58d5a1807f3842b0aa18c481b5b6d8176e33f6b07beef4ecbff2c"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a645d07bba9bbc80868d3aa9a4abc944df3ef5841845305c5a610bdaadce183"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:53c362153f848096eb440bba0745c0f4c373d6ee0ac908aacab5a7d14d67a257"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-win32.whl", hash = "sha256:a962209486a11ac3455c7a7430ed5201618315a6fd9d10088b6098844a93e7d2"}, - {file = "clickhouse_connect-0.8.11-cp311-cp311-win_amd64.whl", hash = "sha256:0e6856782b86cfcbf3ef4a4b6e7c53053e07e285191c7c5ce95d683f48a429aa"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e24a178c84e7f2c9a0e46550f153a7c3b37137f2b5eef3bffac414e85b6626ed"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c232776f757c432ba9e5c5cae8e1d28acfb80513024d4b4717e40022dbc633a1"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf895c60e7266045c4bb5c65037b47e1a467fd88c03c1b0eb12347b4d0902ba"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ccfd929ae888f8d232bae60a383248d263c49da51a6a73a6ae7cf2ed9cae27"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a90d1a99920339eefeb7492a3584d869e3959f9c73139b19ee2726582d611e2c"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47e2244da14da7b0bb9b98d1333989f3edb33ba09cf33ee0a5823d135a14d7f6"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c32dc46df65dbd4a32de755e7b4e76dcc3333381fd8746a4bd2455c9cbfe9a1d"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f22bcb7f0f9e7bd68355e3040ca33a1029f023adc8ba23cfefb4b950b389ee64"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-win32.whl", hash = "sha256:1380757ba05d5adfd342a65c72f5db10a1a79b8c743077f6212b3a07cdb2f68e"}, - {file = "clickhouse_connect-0.8.11-cp312-cp312-win_amd64.whl", hash = "sha256:2c7486720bc6a98d0346b815cf5bf192b62559073cf3975d142de846997fe79a"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:080440911ea1caf8503c113e6171f4542ae30e8336fdb7e074188639095b4c26"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:873faef725731c191032d1c987e7de8c32c20399713c85f7eb52a79c4bfc0e94"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d639158b622cb3eabfa364f1be0e0099db2de448e896e2a5d9bd6f97cc290b3"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffa8e30df365464511683ba4d381fd8a5f5c3b5ad7d399307493ae9a1cc6fd1"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4269333973fae477843be905ed738d0e40671afc8f4991e383d65aaa162c2cd"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c81e908d77bfb6855a9e6a395065b4532e8b68ef7aaea2645ad903ffc11dbc71"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6bdaf6315ca33bc0d7d93e2dd2057bd7cdb81c1891b4a9eb8363548b903f762d"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f07bc6504c98cdf999218a0f6f14cd43321e9939bd41ddcb62ca4f1de3b28714"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-win32.whl", hash = "sha256:f29daff275ceee4161495f175addd53836184b69feb73da45fcc9e52a1c56d1d"}, - {file = "clickhouse_connect-0.8.11-cp313-cp313-win_amd64.whl", hash = "sha256:9f725400248ca9ffbc85d5361a6c0c032b9d988c214178bea9ad22c72d35b5e3"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:32a9efb34f6788a6bb228ce5bb11a778293c711d39ea99ddc997532d3d8aec4d"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:97c773327baf1bd8779f5dbc60fb37416a1dbb065ebbb0df10ddbe8fbd50886c"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade4058fe224d490bafd836ff34cbdbc6e66aa99a7f4267f11e6041d4f651aa5"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f87ddf55eb5d5556a9b35d298c039d9a8b1ca165c3494d0c303709d2d324bd5"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94bd2bf32e927b432afffc14630b33f4ff5544873a5032ebb2bcf4375be4ad4e"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f49de8fb2d43f4958baebb78f941ed8358835704a0475c5bf58a15607c85e0e2"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:8da31d5f6ceda66eefc4bdf5279c181fa5039979f68b92b3651f47cac3ca2801"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73ce4be7b0cb91d7afe3634f69fb1df9abe14307ab4289455f89a091005d4042"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-win32.whl", hash = "sha256:b0f3c785cf0833559d740e516e332cc87d5bb0c98507835eb1319e6a3224a2f6"}, - {file = "clickhouse_connect-0.8.11-cp38-cp38-win_amd64.whl", hash = "sha256:00e67d378855addcbc4b9c75fd999e330a26b3e94b3f34371d97f2f49f053e89"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:037df30c9ff29baa0f3a28e15d838e6cb32fa5ae0975426ebf9f23b89b0ec5a6"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31135f3f8df58236a87db6f485ff8030fa3bcb0ab19eb0220cfb1123251a7a52"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7edddcd3d05441535525efe64078673afad531a0b1cdf565aa852d59ace58e86"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecf0fb15434faa31aa0f5d568567aa0d2d256dcbc5612c10eda8b83f82be099e"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca203a9c36ecede478856c472904e0d283acf78b8fee6a6e60d9bfedd7956d2"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4bfde057e67ed86c60dfa364fa1828febaa719f25ab4f8d80a9f4072e931af78"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fd46a74a24fea4d7adc1dd6ffa239406f3f0660cfbcad3067ad5d16db942c4aa"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bf83b257e354252b36a7f248df063ab2fbbe14fbdeb7f3591ed85951bc5373c7"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-win32.whl", hash = "sha256:8de86b7a95730c1375b15ccda8dfea1de4bd837a6d738e153d72b4fec02fd853"}, - {file = "clickhouse_connect-0.8.11-cp39-cp39-win_amd64.whl", hash = "sha256:fc8e5b24ae8d45eac92c7e78e04f8c2b1cfe35531d86e10fd327435534e10dba"}, - {file = "clickhouse_connect-0.8.11-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5dc6a5b00e6a62e8cdb99109631dad6289ebbe9028f20dc465e457c261ceaf1"}, - {file = "clickhouse_connect-0.8.11-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:db6cc11824104b26f60b102ea4016debc6b37e81208de820cf6f498fc2358149"}, - {file = "clickhouse_connect-0.8.11-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b001bb50d528d50b49ccd1a7b58e0927d58c035f8e7419e4a20aff4e94ea3ff"}, - {file = "clickhouse_connect-0.8.11-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcefeb5e78820e09c9ee57584fde0e4b9df9cb3e71b426eeea2b01d219ddfc55"}, - {file = "clickhouse_connect-0.8.11-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6e3c5d723de634cd9cff0340901f33fd84dafdcb7d016791f17adaa9be94fb"}, - {file = "clickhouse_connect-0.8.11-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e846a68476965181e531d80141d006b53829bc880a48b59da0ee5543a9d8678d"}, - {file = "clickhouse_connect-0.8.11-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:82f51e20a2c56a55f4c0f039f73a67485f9a54ec25d015b149d9813d1d28c65c"}, - {file = "clickhouse_connect-0.8.11-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0dca2ad7b4e39f70d089c4cdbc4e0d3c1666a6d8b93a97c226f6adb651bdf54"}, - {file = "clickhouse_connect-0.8.11-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e768b964cb0d78bb125d830fee1a88216ce8908780ed42aa598fe56d8468a"}, - {file = "clickhouse_connect-0.8.11-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a950595cc51e15bef6942a4b46c9a5a05c24aceae8456e5cfb5fad935213723d"}, - {file = "clickhouse_connect-0.8.11-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac3704e5b464864e522f6d8add8e04af28fad33bdfbc071dd0191e0b810c7a"}, - {file = "clickhouse_connect-0.8.11-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5eeef0f4ee13a05a75452882e5a5ea5eb726af44666b85df7e150235c60f5f91"}, - {file = "clickhouse_connect-0.8.11-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8f259b495acd84ca29ee6437750a4921c0dace7029400373c9dcbf3482b9c680"}, - {file = "clickhouse_connect-0.8.11-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:6d63b2b456a6a208bf4d3ac04fe1c3537d41ba4fcd1c493d6cb0da87c96476a7"}, - {file = "clickhouse_connect-0.8.11-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d8a7bc482655422b4452788a881a72c5d841fe87f507f53d2095f61a5927a6d"}, - {file = "clickhouse_connect-0.8.11-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c94404e2b230dcaeb0e9026433416110abb5367fd847de60651ec9116f13d9f"}, - {file = "clickhouse_connect-0.8.11-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed39bf70e30182ef51ca9c8d0299178ef6ffe8b54c874f969fbbc4e9388f4934"}, - {file = "clickhouse_connect-0.8.11-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:87a64c4ed5dad595a6a421bcdca91d94b103041b723edbc5a020303bb02901fd"}, - {file = "clickhouse_connect-0.8.11.tar.gz", hash = "sha256:c5df47abd5524500df0f4e83aa9502fe0907664e7117ec04d2d3604a9839f15c"}, -] - -[package.dependencies] -certifi = "*" -lz4 = "*" -pytz = "*" -urllib3 = ">=1.26" -zstandard = "*" - -[package.extras] -arrow = ["pyarrow"] -numpy = ["numpy"] -orjson = ["orjson"] -pandas = ["pandas"] -sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] -tzlocal = ["tzlocal (>=4.0)"] - [[package]] name = "clickhouse-driver" version = "0.2.9" @@ -2575,56 +2459,6 @@ files = [ {file = "logmine-0.4.1-py3-none-any.whl", hash = "sha256:bc70a7cdc65f73ac16ccc1a012b5c5b3d3972540f32463eeda5c94c45a4a7fc2"}, ] -[[package]] -name = "lz4" -version = "4.3.3" -description = "LZ4 Bindings for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, - {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, - {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, - {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, - {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, - {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, - {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, - {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, - {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, - {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, - {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, - {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, - {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, - {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, - {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, - {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, - {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, - {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, - {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, - {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, - {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, - {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, - {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, - {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, - {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, - {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, -] - -[package.extras] -docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] -flake8 = ["flake8"] -tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] - [[package]] name = "mailchimp-transactional" version = "1.0.56" @@ -5641,119 +5475,7 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] -[[package]] -name = "zstandard" -version = "0.23.0" -description = "Zstandard bindings for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, - {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, - {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, - {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, - {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, - {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, - {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, - {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, - {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, - {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, - {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, - {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, - {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, - {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, - {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, - {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, - {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, - {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, - {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, - {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, - {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, - {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, - {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, - {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, - {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, -] - -[package.dependencies] -cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} - -[package.extras] -cffi = ["cffi (>=1.11)"] - [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "51e97ea13827b135c43d5a3d5be158f40cc8794596fe1e744d3a1f883b56df61" +content-hash = "2ef4525b13d55ac197afa2f6537d8ab9e0589eb756602540631e1f1a9e074c7f" From 802e22cc89a1eca6ecf65fa5a9461424c0f8093c Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:09:02 +0100 Subject: [PATCH 50/75] Fix --- docs/providers/documentation/clickhouse-http-provider.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/providers/documentation/clickhouse-http-provider.mdx b/docs/providers/documentation/clickhouse-http-provider.mdx index f32efe792..cca132719 100644 --- a/docs/providers/documentation/clickhouse-http-provider.mdx +++ b/docs/providers/documentation/clickhouse-http-provider.mdx @@ -4,4 +4,4 @@ sidebarTitle: 'ClickHouse HTTP Provider' description: 'ClickHouse HTTP provider allows you to interact with ClickHouse database.' --- -This provider is an async (more performant) analog of [clickhouse-provider](clickhouse-provider.mdx). It's using HTTP protocol to interact to the Clickhouse. \ No newline at end of file +This provider is an async (more performant) analog of [clickhouse-provider](providers/documentation/clickhouse-provider.mdx). It's using HTTP protocol to interact to the Clickhouse. \ No newline at end of file From 1db3309d1c665f111fe70a22bf87095ee0661f22 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:13:42 +0100 Subject: [PATCH 51/75] Fix --- docs/mint.json | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/mint.json b/docs/mint.json index 59101220e..1cb99b188 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -121,6 +121,7 @@ "providers/documentation/checkmk-provider", "providers/documentation/cilium-provider", "providers/documentation/clickhouse-provider", + "providers/documentation/clickhouse-http-provider", "providers/documentation/cloudwatch-provider", "providers/documentation/console-provider", "providers/documentation/coralogix-provider", From ae82b424eb508daecce05cb490f3a5a79d2e6ecf Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:15:26 +0100 Subject: [PATCH 52/75] Fix --- tests/test_workflow_execution.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index f47e8e82e..b78b8ec73 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -723,7 +723,6 @@ async def test_workflow_execution_with_disabled_workflow( await workflow_manager.stop() assert enabled_workflow_execution is not None - assert enabled_workflow_execution.status == "success" assert disabled_workflow_execution is None From e3c8dffbf0b46265ca9200d2a4ec2719e8d34a9b Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:22:44 +0100 Subject: [PATCH 53/75] fix --- keep/api/core/tenant_configuration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/keep/api/core/tenant_configuration.py b/keep/api/core/tenant_configuration.py index 3dfc53b40..cab0122a2 100644 --- a/keep/api/core/tenant_configuration.py +++ b/keep/api/core/tenant_configuration.py @@ -21,7 +21,8 @@ def __init__(self): def _load_tenant_configurations(self): self.logger.debug("Loading tenants configurations") - tenants_configuration = asyncio.run(get_tenants_configurations()) + loop = asyncio.get_event_loop() + tenants_configuration = loop.run_until_complete(get_tenants_configurations()) self.logger.debug( "Tenants configurations loaded", extra={ From 96550329499c976c18740997bceb0ead03acca51 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 03:58:37 +0100 Subject: [PATCH 54/75] Fix --- keep/api/core/tenant_configuration.py | 9 +++++++-- poetry.lock | 13 ++++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/keep/api/core/tenant_configuration.py b/keep/api/core/tenant_configuration.py index cab0122a2..2e809b24f 100644 --- a/keep/api/core/tenant_configuration.py +++ b/keep/api/core/tenant_configuration.py @@ -21,8 +21,13 @@ def __init__(self): def _load_tenant_configurations(self): self.logger.debug("Loading tenants configurations") - loop = asyncio.get_event_loop() - tenants_configuration = loop.run_until_complete(get_tenants_configurations()) + + # Patching because this method could be called from a sync context which is inside the loop. + # Todo: asynchroiize the whole method. + import nest_asyncio + nest_asyncio.apply() + + tenants_configuration = asyncio.run(get_tenants_configurations()) self.logger.debug( "Tenants configurations loaded", extra={ diff --git a/poetry.lock b/poetry.lock index 0ee5e16bc..631bbb5ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2791,6 +2791,17 @@ files = [ pyasn1 = ">=0.1.1" PyOpenSSL = "*" +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "nodeenv" version = "1.9.1" @@ -5478,4 +5489,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "2ef4525b13d55ac197afa2f6537d8ab9e0589eb756602540631e1f1a9e074c7f" +content-hash = "865b6da30d51d0ea7a921852c375700e3cb66d1ac7f72196c7ac1275f11a9dda" From e278dbb498bccf1c367b360537a033064a9f6f6f Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 04:02:55 +0100 Subject: [PATCH 55/75] lock --- poetry.lock | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index 631bbb5ea..0ee5e16bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2791,17 +2791,6 @@ files = [ pyasn1 = ">=0.1.1" PyOpenSSL = "*" -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - [[package]] name = "nodeenv" version = "1.9.1" @@ -5489,4 +5478,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "865b6da30d51d0ea7a921852c375700e3cb66d1ac7f72196c7ac1275f11a9dda" +content-hash = "2ef4525b13d55ac197afa2f6537d8ab9e0589eb756602540631e1f1a9e074c7f" From e4e00cfcf7642281c5782c59310b037184ca3e3b Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 04:14:38 +0100 Subject: [PATCH 56/75] Fix --- poetry.lock | 13 ++++++++++++- tests/conftest.py | 4 ++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0ee5e16bc..631bbb5ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2791,6 +2791,17 @@ files = [ pyasn1 = ">=0.1.1" PyOpenSSL = "*" +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "nodeenv" version = "1.9.1" @@ -5478,4 +5489,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "2ef4525b13d55ac197afa2f6537d8ab9e0589eb756602540631e1f1a9e074c7f" +content-hash = "865b6da30d51d0ea7a921852c375700e3cb66d1ac7f72196c7ac1275f11a9dda" diff --git a/tests/conftest.py b/tests/conftest.py index be148cac1..38d053146 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -418,8 +418,8 @@ def is_elastic_responsive(host, port, user, password): info = elastic_client._client.info() print("Elastic still up now") return True if info else False - except Exception: - print("Elastic still not up") + except Exception as e: + print(f"Elastic still not up: {e}") return False From c1bb9bbfbcac97367700d6dce565444b7adc98e5 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 04:18:06 +0100 Subject: [PATCH 57/75] Fix --- poetry.lock | 2 +- pyproject.toml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 631bbb5ea..3497f3955 100644 --- a/poetry.lock +++ b/poetry.lock @@ -5489,4 +5489,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "865b6da30d51d0ea7a921852c375700e3cb66d1ac7f72196c7ac1275f11a9dda" +content-hash = "355cb6d49e88e6d94183731428cd0f1beecaa751c6c96bd72f3dc6518815f783" diff --git a/pyproject.toml b/pyproject.toml index 7c4cfeecb..f518aac5c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ posthog = "^3.0.1" google-cloud-storage = "^2.10.0" auth0-python = "^4.4.1" asyncio = "^3.4.3" +nest-asyncio = "1.6.0" python-multipart = "^0.0.18" kubernetes = "^27.2.0" opentelemetry-exporter-otlp-proto-grpc = "^1.20.0" From 57edee70180412eb14e6a04b0937322981497214 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 04:35:44 +0100 Subject: [PATCH 58/75] Sync back --- keep/api/core/db.py | 8 ++++---- keep/api/core/tenant_configuration.py | 8 +------- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 4cf7d1293..6ed26bd47 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -2862,10 +2862,10 @@ def update_action( return found_action -async def get_tenants_configurations(only_with_config=False) -> List[Tenant]: - async with AsyncSession(engine_async) as session: +def get_tenants_configurations(only_with_config=False) -> List[Tenant]: + with Session(engine) as session: try: - tenants = (await session.exec(select(Tenant))).all() + tenants = session.exec(select(Tenant)).all() # except column configuration does not exist (new column added) except OperationalError as e: if "Unknown column" in str(e): @@ -2881,7 +2881,7 @@ async def get_tenants_configurations(only_with_config=False) -> List[Tenant]: continue tenants_configurations[tenant.id] = tenant.configuration or {} - return tenants_configurations + return tenants_configurations def update_preset_options(tenant_id: str, preset_id: str, options: dict) -> Preset: diff --git a/keep/api/core/tenant_configuration.py b/keep/api/core/tenant_configuration.py index 2e809b24f..55f911e43 100644 --- a/keep/api/core/tenant_configuration.py +++ b/keep/api/core/tenant_configuration.py @@ -21,13 +21,7 @@ def __init__(self): def _load_tenant_configurations(self): self.logger.debug("Loading tenants configurations") - - # Patching because this method could be called from a sync context which is inside the loop. - # Todo: asynchroiize the whole method. - import nest_asyncio - nest_asyncio.apply() - - tenants_configuration = asyncio.run(get_tenants_configurations()) + tenants_configuration = get_tenants_configurations() self.logger.debug( "Tenants configurations loaded", extra={ From 80fe298d7cdb3d28c592be6d179c9b3017b0655a Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 04:38:40 +0100 Subject: [PATCH 59/75] Extra import --- keep/api/core/tenant_configuration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/keep/api/core/tenant_configuration.py b/keep/api/core/tenant_configuration.py index 55f911e43..b948e597f 100644 --- a/keep/api/core/tenant_configuration.py +++ b/keep/api/core/tenant_configuration.py @@ -1,4 +1,3 @@ -import asyncio import logging from datetime import datetime, timedelta From 7a819e1e644833c3226be51cea59002ee980f3bb Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 20:27:55 +0100 Subject: [PATCH 60/75] Vladimir's fix --- tests/test_enrichments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_enrichments.py b/tests/test_enrichments.py index 752d65b9b..b41751802 100644 --- a/tests/test_enrichments.py +++ b/tests/test_enrichments.py @@ -455,7 +455,7 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto, db_sessio indirect=True, ) @pytest.mark.asyncio -async def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts, db_session): +async def test_mapping_rule_with_elsatic(db_session, mock_session, mock_alert_dto, setup_alerts): import os # first, use elastic From 2663341da4240288f407421a16c418334ccc28c5 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 20:41:43 +0100 Subject: [PATCH 61/75] Fix --- keep/api/core/db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 58b483ee7..9ae15b015 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -2893,7 +2893,7 @@ def get_tenants_configurations(only_with_config=False) -> List[Tenant]: continue tenants_configurations[tenant.id] = tenant.configuration or {} - return tenants_configurations + return tenants_configurations def update_preset_options(tenant_id: str, preset_id: str, options: dict) -> Preset: From 42abec3e2ba80bbb1673762b6e89685e7b29fdc9 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 25 Dec 2024 20:50:02 +0100 Subject: [PATCH 62/75] poetry lock --- poetry.lock | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index b2797fb6a..a303f10f6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4614,6 +4614,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, @@ -4622,6 +4623,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, @@ -4630,6 +4632,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, @@ -4638,6 +4641,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, @@ -4646,6 +4650,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, @@ -5544,5 +5549,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" - -content-hash = "355cb6d49e88e6d94183731428cd0f1beecaa751c6c96bd72f3dc6518815f783" +content-hash = "6c66596c24c73e6b778c75db6a846bf3952f883ef6c3097455e15f862051dc5d" From 7efd63dcbb1008e4361392dd51eeda289019069a Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Thu, 26 Dec 2024 14:53:04 +0100 Subject: [PATCH 63/75] Fix --- keep/api/routes/workflows.py | 71 +++++++++++++++++++++++------------- 1 file changed, 45 insertions(+), 26 deletions(-) diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py index 4602f7951..0607aadb2 100644 --- a/keep/api/routes/workflows.py +++ b/keep/api/routes/workflows.py @@ -21,6 +21,7 @@ from sqlmodel import Session from keep.api.core.db import ( + get_alert_by_event_id, get_installed_providers, get_last_workflow_workflow_to_alert_executions, get_session, @@ -36,6 +37,7 @@ WorkflowExecutionLogsDTO, WorkflowToAlertExecutionDTO, ) +from keep.api.utils.enrichment_helpers import convert_db_alerts_to_dto_alerts from keep.api.utils.pagination import WorkflowExecutionsPaginatedResultsDto from keep.identitymanager.authenticatedentity import AuthenticatedEntity from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory @@ -169,6 +171,8 @@ def export_workflows( ) async def run_workflow( workflow_id: str, + event_type: Optional[str] = Query(None), + event_id: Optional[str] = Query(None), body: Optional[Dict[Any, Any]] = Body(None), authenticated_entity: AuthenticatedEntity = Depends( IdentityManagerFactory.get_auth_verifier(["write:workflows"]) @@ -182,41 +186,55 @@ async def run_workflow( if not validators.uuid(workflow_id): logger.info("Workflow ID is not a UUID, trying to get the ID by name") workflow_id = getattr(get_workflow_by_name(tenant_id, workflow_id), "id", None) + workflowmanager = WorkflowManager.get_instance() - # Finally, run it try: - - if body.get("type", "alert") == "alert": - event_class = AlertDto + # Handle replay from query parameters + if event_type and event_id: + if event_type == "alert": + # Fetch alert from your alert store + alert_db = get_alert_by_event_id(tenant_id, event_id) + event = convert_db_alerts_to_dto_alerts([alert_db])[0] + elif event_type == "incident": + # SHAHAR: TODO + raise NotImplementedError("Incident replay is not supported yet") + else: + raise HTTPException( + status_code=400, + detail=f"Invalid event type: {event_type}", + ) else: - event_class = IncidentDto - - event_body = body.get("body", {}) or body - - # if its event that was triggered by the UI with the Modal - fingerprint = event_body.get("fingerprint", "") - if (fingerprint and "test-workflow" in fingerprint) or not body: - # some random - event_body["id"] = event_body.get("fingerprint", "manual-run") - event_body["name"] = event_body.get("fingerprint", "manual-run") - event_body["lastReceived"] = datetime.datetime.now( - tz=datetime.timezone.utc - ).isoformat() - if "source" in event_body and not isinstance(event_body["source"], list): - event_body["source"] = [event_body["source"]] - try: - event = event_class(**event_body) - except TypeError: - raise HTTPException( - status_code=400, - detail="Invalid event format", + # Handle regular run from body + event_body = body.get("body", {}) or body + event_class = ( + AlertDto if body.get("type", "alert") == "alert" else IncidentDto ) + # Handle UI triggered events + fingerprint = event_body.get("fingerprint", "") + if (fingerprint and "test-workflow" in fingerprint) or not body: + event_body["id"] = event_body.get("fingerprint", "manual-run") + event_body["name"] = event_body.get("fingerprint", "manual-run") + event_body["lastReceived"] = datetime.datetime.now( + tz=datetime.timezone.utc + ).isoformat() + if "source" in event_body and not isinstance( + event_body["source"], list + ): + event_body["source"] = [event_body["source"]] + + try: + event = event_class(**event_body) + except TypeError: + raise HTTPException( + status_code=400, + detail="Invalid event format", + ) + workflow_execution_id = await workflowmanager.scheduler.handle_manual_event_workflow( workflow_id, tenant_id, created_by, event ) - except Exception as e: logger.exception( "Failed to run workflow", @@ -226,6 +244,7 @@ async def run_workflow( status_code=500, detail=f"Failed to run workflow {workflow_id}: {e}", ) + logger.info( "Workflow ran successfully", extra={ From 78ec00b55278086d27032ce6327dd66735b474e3 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Thu, 26 Dec 2024 15:06:30 +0100 Subject: [PATCH 64/75] Polishing --- keep/api/routes/workflows.py | 9 ++++----- tests/conftest.py | 4 ++-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py index 0607aadb2..b9e8ce6da 100644 --- a/keep/api/routes/workflows.py +++ b/keep/api/routes/workflows.py @@ -1,4 +1,3 @@ -import asyncio import datetime import logging import os @@ -275,9 +274,9 @@ async def run_workflow_from_definition( workflowstore = WorkflowStore() workflowmanager = WorkflowManager.get_instance() try: - workflow = asyncio.run(workflowstore.get_workflow_from_dict( + workflow = await workflowstore.get_workflow_from_dict( tenant_id=tenant_id, workflow=workflow - )) + ) except Exception as e: logger.exception( "Failed to parse workflow", @@ -289,9 +288,9 @@ async def run_workflow_from_definition( ) try: - workflow_execution = asyncio.run(workflowmanager.scheduler.handle_workflow_test( + workflow_execution = await workflowmanager.scheduler.handle_workflow_test( workflow, tenant_id, created_by - )) + ) except Exception as e: logger.exception( "Failed to run test workflow", diff --git a/tests/conftest.py b/tests/conftest.py index 38d053146..0cf1496a3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -224,7 +224,6 @@ def db_session(request, monkeypatch): ) t.append_constraint(status_index) mock_engine = create_engine(db_connection_string) - mock_engine_async = create_async_engine(asynchronize_connection_string(db_connection_string)) # sqlite else: db_connection_string = "sqlite:///file:shared_memory?mode=memory&cache=shared&uri=true" @@ -233,7 +232,6 @@ def db_session(request, monkeypatch): connect_args={"check_same_thread": False}, poolclass=StaticPool, ) - mock_engine_async = create_async_engine(asynchronize_connection_string(db_connection_string)) # @tb: leaving this here if anybody else gets to problem with nested transactions # https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#serializable-isolation-savepoints-transactional-ddl @@ -251,6 +249,8 @@ def do_begin(conn): except Exception: pass + mock_engine_async = create_async_engine(asynchronize_connection_string(db_connection_string)) + SQLModel.metadata.create_all(mock_engine) # Mock the environment variables so db.py will use it From d42a1ee56a00bda4cf18f4e01ef0e268e9d92a80 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Thu, 26 Dec 2024 16:48:27 +0100 Subject: [PATCH 65/75] Fix --- tests/test_workflow_execution.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index b78b8ec73..1a5fa0136 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -279,6 +279,7 @@ async def test_workflow_execution( # Check if the workflow execution was successful assert workflow_execution is not None + assert workflow_execution.status == "success" # Verify if the correct tier action was triggered if expected_tier is None: @@ -472,6 +473,7 @@ async def test_workflow_execution_2( assert len(workflow_manager.scheduler.workflows_to_run) == 0 # Check if the workflow execution was successful assert workflow_execution is not None + assert workflow_execution.status == "success" # Verify if the correct action was triggered if expected_action: @@ -599,6 +601,7 @@ async def test_workflow_execution_3( # Check if the workflow execution was successful assert workflow_execution is not None + assert workflow_execution.status == "success" # Verify if the correct tier action was triggered if expected_tier is None: @@ -982,6 +985,7 @@ async def test_workflow_execution_logs( # Check if the workflow execution was successful assert workflow_execution is not None + assert workflow_execution.status == "success" logs = ( db_session.query(WorkflowExecutionLog) @@ -1066,6 +1070,7 @@ async def test_workflow_execution_logs_log_level_debug_console_provider( # Check if the workflow execution was successful assert workflow_execution is not None + assert workflow_execution.status == "success" logs_counts[workflow_execution.id] = logs_counter[workflow_execution.id][ "all" @@ -1314,6 +1319,7 @@ async def test_alert_routing_policy( await workflow_manager.stop() # Verify workflow execution assert workflow_execution is not None + assert workflow_execution.status == "success" # Check if the actions were triggered as expected for action_name, expected_messages in expected_results.items(): @@ -1501,6 +1507,7 @@ async def test_nested_conditional_flow( # Verify workflow execution assert workflow_execution is not None + assert workflow_execution.status == "success" # Check if the actions were triggered as expected for action_name, expected_messages in expected_results.items(): From efbbb0ed3d79a60b5e2ff328df3be39e1ca92d8d Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 30 Dec 2024 12:46:31 +0100 Subject: [PATCH 66/75] Fix --- keep/workflowmanager/workflowscheduler.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index ad289e90b..f7b51e162 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -3,8 +3,6 @@ import hashlib import logging import queue -import threading -import typing import time import uuid from concurrent.futures import ThreadPoolExecutor @@ -403,7 +401,6 @@ async def _handle_event_workflows(self): # TODO - event workflows should be in DB too, to avoid any state problems. # take out all items from the workflows to run and run them, also, clean the self.workflows_to_run list - tasks = [] with self.lock: workflows_to_run, self.workflows_to_run = self.workflows_to_run, [] for workflow_to_run in workflows_to_run: From 5e5bd2cc305baa3e2b5790dafa27b025cef17b50 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 30 Dec 2024 12:47:17 +0100 Subject: [PATCH 67/75] Fix --- keep/cli/cli.py | 1 - 1 file changed, 1 deletion(-) diff --git a/keep/cli/cli.py b/keep/cli/cli.py index b43beb07b..02f3d4def 100644 --- a/keep/cli/cli.py +++ b/keep/cli/cli.py @@ -1,4 +1,3 @@ -import asyncio import json import logging import logging.config From 0554ab8ec9da020be44d5b94c8758bb99d15fcaa Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 30 Dec 2024 12:49:13 +0100 Subject: [PATCH 68/75] Fix --- poetry.lock | 161 ++++++++++++++++++++++++++-------------------------- 1 file changed, 81 insertions(+), 80 deletions(-) diff --git a/poetry.lock b/poetry.lock index c80785f78..857ecbc90 100644 --- a/poetry.lock +++ b/poetry.lock @@ -629,17 +629,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.35.87" +version = "1.35.90" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.87-py3-none-any.whl", hash = "sha256:588ab05e2771c50fca5c242be14e7a25200ffd3dd95c45950ce40993473864c7"}, - {file = "boto3-1.35.87.tar.gz", hash = "sha256:341c58602889078a4a25dc4331b832b5b600a33acd73471d2532c6f01b16fbb4"}, + {file = "boto3-1.35.90-py3-none-any.whl", hash = "sha256:b0874233057995a8f0c813f5b45a36c09630e74c43d7a7c64db2feef2915d493"}, + {file = "boto3-1.35.90.tar.gz", hash = "sha256:dc56caaaab2157a4bfc109c88b50cd032f3ac66c06d17f8ee335b798eaf53e5c"}, ] [package.dependencies] -botocore = ">=1.35.87,<1.36.0" +botocore = ">=1.35.90,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -648,13 +648,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.87" +version = "1.35.90" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.87-py3-none-any.whl", hash = "sha256:81cf84f12030d9ab3829484b04765d5641697ec53c2ac2b3987a99eefe501692"}, - {file = "botocore-1.35.87.tar.gz", hash = "sha256:3062d073ce4170a994099270f469864169dc1a1b8b3d4a21c14ce0ae995e0f89"}, + {file = "botocore-1.35.90-py3-none-any.whl", hash = "sha256:51dcbe1b32e2ac43dac17091f401a00ce5939f76afe999081802009cce1e92e4"}, + {file = "botocore-1.35.90.tar.gz", hash = "sha256:f007f58e8e3c1ad0412a6ddfae40ed92a7bca571c068cb959902bcf107f2ae48"}, ] [package.dependencies] @@ -1105,73 +1105,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.9" +version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, - {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, - {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, - {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, - {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, - {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, - {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, - {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, - {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, - {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, - {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, - {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, - {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, - {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, - {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, - {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, - {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, - {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, ] [package.extras] @@ -2273,13 +2273,13 @@ parser = ["pyhcl (>=0.4.4,<0.5.0)"] [[package]] name = "identify" -version = "2.6.3" +version = "2.6.4" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, - {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, + {file = "identify-2.6.4-py2.py3-none-any.whl", hash = "sha256:993b0f01b97e0568c179bb9196391ff391bfb88a99099dbf5ce392b68f42d0af"}, + {file = "identify-2.6.4.tar.gz", hash = "sha256:285a7d27e397652e8cafe537a6cc97dd470a970f48fb2e9d979aa38eae5513ac"}, ] [package.extras] @@ -2809,7 +2809,7 @@ name = "ndg-httpsclient" version = "0.5.1" description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL" optional = false -python-versions = ">=2.7,<3.0.0 || >=3.4.0" +python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0" files = [ {file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"}, {file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"}, @@ -4248,13 +4248,13 @@ cli = ["click (>=5.0)"] [[package]] name = "python-engineio" -version = "4.11.1" +version = "4.11.2" description = "Engine.IO server and client for Python" optional = false python-versions = ">=3.6" files = [ - {file = "python_engineio-4.11.1-py3-none-any.whl", hash = "sha256:8ff9ec366724cd9b0fd92acf7a61b15ae923d28f37f842304adbd7f71b3d6672"}, - {file = "python_engineio-4.11.1.tar.gz", hash = "sha256:ff8a23a843c223ec793835f1bcf584ff89ce0f1c2bcce37dffa6436c6fa74133"}, + {file = "python_engineio-4.11.2-py3-none-any.whl", hash = "sha256:f0971ac4c65accc489154fe12efd88f53ca8caf04754c46a66e85f5102ef22ad"}, + {file = "python_engineio-4.11.2.tar.gz", hash = "sha256:145bb0daceb904b4bb2d3eb2d93f7dbb7bb87a6a0c4f20a94cc8654dec977129"}, ] [package.dependencies] @@ -4340,13 +4340,13 @@ files = [ [[package]] name = "python-socketio" -version = "5.12.0" +version = "5.12.1" description = "Socket.IO server and client for Python" optional = false python-versions = ">=3.8" files = [ - {file = "python_socketio-5.12.0-py3-none-any.whl", hash = "sha256:50fe22fd2b0aa634df3e74489e42217b09af2fb22eee45f2c006df36d1d08cb9"}, - {file = "python_socketio-5.12.0.tar.gz", hash = "sha256:39b55bff4ef6ac5c39b8bbc38fa61962e22e15349b038c1ca7ee2e18824e06dc"}, + {file = "python_socketio-5.12.1-py3-none-any.whl", hash = "sha256:24a0ea7cfff0e021eb28c68edbf7914ee4111bdf030b95e4d250c4dc9af7a386"}, + {file = "python_socketio-5.12.1.tar.gz", hash = "sha256:0299ff1f470b676c09c1bfab1dead25405077d227b2c13cf217a34dadc68ba9c"}, ] [package.dependencies] @@ -5550,3 +5550,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" +content-hash = "21315efa1186eb473efe6a16eaaac7834d23feb9a2a00482176afa98e3ff1f25" From daca2e90bf6d465b5bad6d15d51cfdcdb851944f Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 30 Dec 2024 13:37:04 +0100 Subject: [PATCH 69/75] Fix --- keep/workflowmanager/workflowscheduler.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index f7b51e162..7083ec6ee 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -617,8 +617,7 @@ async def _handle_event_workflows(self): ) - async def _start(self): - self.logger.info("Starting workflows scheduler") + async def _start_async(self): while not self._stop: # get all workflows that should run now self.logger.debug( @@ -637,6 +636,17 @@ async def _start(self): await asyncio.sleep(1) self.logger.info("Workflows scheduler stopped") + def _start(self): + """ + Generating new event loop and running the scheduler. + This method should be executed in a separate thread. + """ + self.logger.info("Starting workflows scheduler") + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + result = loop.run_until_complete(self._start_async()) + loop.close() + return result def stop(self): self.logger.info("Stopping scheduled workflows") From 5dc46a9dcf58c5e288a153f640d027604e95adc9 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 6 Jan 2025 19:34:40 +0100 Subject: [PATCH 70/75] Fix --- keep/workflowmanager/workflowscheduler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 7083ec6ee..316654df5 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -259,7 +259,7 @@ async def _run_workflow( return True - async def handle_workflow_test(self, workflow, tenant_id, triggered_by_user): + def handle_workflow_test(self, workflow, tenant_id, triggered_by_user): workflow_execution_id = self._get_unique_execution_number() From fc00d4cc8b565119a6d14ac785156f539cbe119b Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 6 Jan 2025 20:29:33 +0100 Subject: [PATCH 71/75] Simplify --- keep/workflowmanager/workflowmanager.py | 2 -- keep/workflowmanager/workflowscheduler.py | 44 +++++++++-------------- tests/test_workflow_execution.py | 25 ++++--------- 3 files changed, 23 insertions(+), 48 deletions(-) diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index 8c8ae04b3..55bc1687d 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -62,8 +62,6 @@ async def stop(self): self.scheduler.stop() self.started = False - # Clear the scheduler reference - self.scheduler = None def _apply_filter(self, filter_val, value): # if it's a regex, apply it diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py index 316654df5..caf705898 100644 --- a/keep/workflowmanager/workflowscheduler.py +++ b/keep/workflowmanager/workflowscheduler.py @@ -88,7 +88,7 @@ def __init__(self, workflow_manager): max_workers=self.MAX_WORKERS, thread_name_prefix="WorkflowScheduler", ) - self.scheduler_future = None + self.run_future = None self.futures = set() # Initialize metrics for queue size self._update_queue_metrics() @@ -106,7 +106,7 @@ async def start(self, loop=None): self.logger.info("Starting workflows scheduler") # Shahar: fix for a bug in unit tests self._stop = False - self.scheduler_future = self.executor.submit(self._start) + self.run_future = asyncio.create_task(self._run()) self.logger.info("Workflows scheduler started") async def _handle_interval_workflows(self): @@ -158,12 +158,13 @@ async def _handle_interval_workflows(self): ) continue - future = self.executor.submit( - self._run_workflow, - tenant_id, - workflow_id, - workflow_obj, - workflow_execution_id, + future = asyncio.create_task( + self._run_workflow( + tenant_id, + workflow_id, + workflow_obj, + workflow_execution_id, + ) ) self.futures.add(future) future.add_done_callback(lambda f: self.futures.remove(f)) @@ -599,15 +600,14 @@ async def _handle_event_workflows(self): error=f"Error getting alert by id: {e}", ) continue - # Last, run the workflow - future = self.executor.submit( - self._run_workflow, + # Last, run the workflow in the current event loop. + future = asyncio.create_task(self._run_workflow( tenant_id, workflow_id, workflow, workflow_execution_id, event, - ) + )) self.futures.add(future) future.add_done_callback(lambda f: self.futures.remove(f)) @@ -617,7 +617,8 @@ async def _handle_event_workflows(self): ) - async def _start_async(self): + async def _run(self): + self.logger.info("Starting workflows scheduler") while not self._stop: # get all workflows that should run now self.logger.debug( @@ -636,26 +637,14 @@ async def _start_async(self): await asyncio.sleep(1) self.logger.info("Workflows scheduler stopped") - def _start(self): - """ - Generating new event loop and running the scheduler. - This method should be executed in a separate thread. - """ - self.logger.info("Starting workflows scheduler") - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - result = loop.run_until_complete(self._start_async()) - loop.close() - return result - def stop(self): self.logger.info("Stopping scheduled workflows") self._stop = True # Wait for scheduler to stop first - if self.scheduler_future: + if self.run_future: try: - self.scheduler_future.result( + self.run_future.result( timeout=5 ) # Add timeout to prevent hanging except Exception: @@ -676,7 +665,6 @@ def stop(self): try: self.logger.info("Shutting down executor") self.executor.shutdown(wait=True, cancel_futures=True) - self.executor = None self.logger.info("Executor shut down") except Exception: self.logger.exception("Error shutting down executor") diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index fe561a20b..4b03b7ac9 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -77,29 +77,18 @@ """ @pytest.fixture(scope="module") -@pytest.mark.asyncio def workflow_manager(): """ Fixture to create and manage a WorkflowManager instance. """ manager = None - try: - from keep.workflowmanager.workflowscheduler import WorkflowScheduler - - scheduler = WorkflowScheduler(None) - manager = WorkflowManager.get_instance() - scheduler.workflow_manager = manager - manager.scheduler = scheduler - asyncio.run(manager.start()) - yield manager - finally: - if manager: - try: - manager.stop() - # Give some time for threads to clean up - time.sleep(1) - except Exception as e: - print(f"Error stopping workflow manager: {e}") + from keep.workflowmanager.workflowscheduler import WorkflowScheduler + + scheduler = WorkflowScheduler(None) + manager = WorkflowManager.get_instance() + scheduler.workflow_manager = manager + manager.scheduler = scheduler + yield manager @pytest.fixture From c347ae916221af832273a7a9180feeda02514dd5 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 6 Jan 2025 21:22:57 +0100 Subject: [PATCH 72/75] Fix --- tests/test_workflow_execution.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py index 4b03b7ac9..a83686b7b 100644 --- a/tests/test_workflow_execution.py +++ b/tests/test_workflow_execution.py @@ -1306,24 +1306,24 @@ async def test_alert_routing_policy( # Wait for workflow execution workflow_execution = None count = 0 + found = False while ( - workflow_execution is None - or workflow_execution.status == "in_progress" - and count < 30 + not found and count < 30 ): workflow_execution = await get_last_workflow_execution_by_workflow_id( SINGLE_TENANT_UUID, "alert-routing-policy" ) if workflow_execution is not None and workflow_execution.status == "success": - break + found = True await asyncio.sleep(1) count += 1 - await workflow_manager.stop() # Verify workflow execution assert workflow_execution is not None assert workflow_execution.status == "success" + await workflow_manager.stop() + # Check if the actions were triggered as expected for action_name, expected_messages in expected_results.items(): if not expected_messages: From 496a61a2f07747849891ec51e22fc5f8a3f8cee8 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 6 Jan 2025 21:55:34 +0100 Subject: [PATCH 73/75] Fix --- keep/workflowmanager/workflowmanager.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index 55bc1687d..2bedf0045 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -39,7 +39,6 @@ def __init__(self): self.scheduler = WorkflowScheduler(self) self.workflow_store = WorkflowStore() self.started = False - self._running_task = None async def start(self): """Runs the workflow manager in server mode""" @@ -54,12 +53,6 @@ async def stop(self): if not self.started: return - if self._running_task is not None: - try: - await self._running_task - except RuntimeError: - logging.error("Can't await self._running_task. Probably already awaited.") - self.scheduler.stop() self.started = False From 31d76719002909fe939199394964bc14dad21c9b Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 6 Jan 2025 22:03:50 +0100 Subject: [PATCH 74/75] Fix --- keep/api/core/db.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 2d3dd77de..cdfc041dc 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -670,6 +670,12 @@ async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status select(WorkflowExecution).where(WorkflowExecution.id == execution_id) )).first() + try: + execution_time = (datetime.utcnow() - workflow_execution_old.started).total_seconds() + except AttributeError: + execution_time = 0 + logging.warning(f"Failed to calculate execution time for {execution_id}") + # Perform the update query result = await session.exec( update(WorkflowExecution) @@ -678,7 +684,7 @@ async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status is_running=random_number, status=status, error=error[:255] if error else None, - execution_time=(datetime.utcnow() - workflow_execution_old.started).total_seconds() + execution_time=execution_time ) ) @@ -1619,6 +1625,7 @@ def update_user_role(tenant_id, username, role): async def save_workflow_results(tenant_id, workflow_execution_id, workflow_results): + logging.info(f"Saving workflow results for {workflow_execution_id}, {workflow_results}") async with AsyncSession(engine_async) as session: await session.exec( update(WorkflowExecution) From 29962fc2ef5b73c8745343c42cf6c123edae0e77 Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Mon, 6 Jan 2025 22:11:12 +0100 Subject: [PATCH 75/75] Fix --- keep/api/core/db.py | 1 - 1 file changed, 1 deletion(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index cdfc041dc..402cde8cc 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -1625,7 +1625,6 @@ def update_user_role(tenant_id, username, role): async def save_workflow_results(tenant_id, workflow_execution_id, workflow_results): - logging.info(f"Saving workflow results for {workflow_execution_id}, {workflow_results}") async with AsyncSession(engine_async) as session: await session.exec( update(WorkflowExecution)