diff --git a/.github/workflows/test-pr.yml b/.github/workflows/test-pr.yml
index e07f3ebc0..d90712551 100644
--- a/.github/workflows/test-pr.yml
+++ b/.github/workflows/test-pr.yml
@@ -95,7 +95,7 @@ jobs:
LOG_LEVEL: DEBUG
SQLALCHEMY_WARN_20: 1
run: |
- poetry run coverage run --branch -m pytest --timeout 20 -n auto --non-integration --ignore=tests/e2e_tests/
+ poetry run coverage run --branch -m pytest -n auto --non-integration --ignore=tests/e2e_tests/
- name: Run integration tests and report coverage
run: |
diff --git a/docs/mint.json b/docs/mint.json
index 373398997..8d27632e5 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -122,6 +122,7 @@
"providers/documentation/checkmk-provider",
"providers/documentation/cilium-provider",
"providers/documentation/clickhouse-provider",
+ "providers/documentation/clickhouse-http-provider",
"providers/documentation/cloudwatch-provider",
"providers/documentation/console-provider",
"providers/documentation/coralogix-provider",
diff --git a/docs/providers/documentation/clickhouse-http-provider.mdx b/docs/providers/documentation/clickhouse-http-provider.mdx
new file mode 100644
index 000000000..cca132719
--- /dev/null
+++ b/docs/providers/documentation/clickhouse-http-provider.mdx
@@ -0,0 +1,7 @@
+---
+title: 'ClickHouse HTTP'
+sidebarTitle: 'ClickHouse HTTP Provider'
+description: 'The ClickHouse HTTP provider allows you to interact with a ClickHouse database.'
+---
+
+This provider is an async (more performant) analog of the [clickhouse-provider](providers/documentation/clickhouse-provider.mdx). It uses the HTTP protocol to interact with ClickHouse.
\ No newline at end of file
diff --git a/docs/providers/overview.mdx b/docs/providers/overview.mdx
index 30c9b523e..129c42dd2 100644
--- a/docs/providers/overview.mdx
+++ b/docs/providers/overview.mdx
@@ -164,6 +164,12 @@ By leveraging Keep Providers, users are able to deeply integrate Keep with the t
}
>
+ }
+>
+
None:
try:
workflow_manager = WorkflowManager.get_instance()
- workflow_manager.insert_incident(self.tenant_id, incident_dto, action)
+ asyncio.run(workflow_manager.insert_incident(self.tenant_id, incident_dto, action))
except Exception:
self.logger.exception(
"Failed to run workflows based on incident",
@@ -233,6 +234,7 @@ def delete_incident(self, incident_id: UUID) -> None:
self.update_client_on_incident_change()
self.send_workflow_event(incident_dto, "deleted")
+
def update_incident(
self,
incident_id: UUID,
diff --git a/keep/api/core/db.py b/keep/api/core/db.py
index d17817421..402cde8cc 100644
--- a/keep/api/core/db.py
+++ b/keep/api/core/db.py
@@ -39,6 +39,7 @@
from sqlalchemy.exc import IntegrityError, OperationalError
from sqlalchemy.orm import joinedload, subqueryload
from sqlalchemy.sql import exists, expression
+from sqlmodel.ext.asyncio.session import AsyncSession
from sqlmodel import Session, SQLModel, col, or_, select, text
from keep.api.consts import STATIC_PRESETS
@@ -82,6 +83,8 @@
engine = create_db_engine()
+engine_async = create_db_engine(_async=True)
+
SQLAlchemyInstrumentor().instrument(enable_commenter=True, engine=engine)
@@ -145,7 +148,7 @@ def __convert_to_uuid(value: str) -> UUID | None:
return None
-def create_workflow_execution(
+async def create_workflow_execution(
workflow_id: str,
tenant_id: str,
triggered_by: str,
@@ -155,7 +158,7 @@ def create_workflow_execution(
execution_id: str = None,
event_type: str = "alert",
) -> str:
- with Session(engine) as session:
+ async with AsyncSession(engine_async) as session:
try:
if len(triggered_by) > 255:
triggered_by = triggered_by[:255]
@@ -170,7 +173,7 @@ def create_workflow_execution(
)
session.add(workflow_execution)
# Ensure the object has an id
- session.flush()
+ await session.flush()
execution_id = workflow_execution.id
if KEEP_AUDIT_EVENTS_ENABLED:
if fingerprint and event_type == "alert":
@@ -188,10 +191,10 @@ def create_workflow_execution(
)
session.add(workflow_to_incident_execution)
- session.commit()
+ await session.commit()
return execution_id
except IntegrityError:
- session.rollback()
+ await session.rollback()
logger.debug(
f"Failed to create a new execution for workflow {workflow_id}. Constraint is met."
)
@@ -226,12 +229,12 @@ def get_last_completed_execution(
).first()
-def get_workflows_that_should_run():
- with Session(engine) as session:
+async def get_workflows_that_should_run():
+ async with AsyncSession(engine_async) as session:
logger.debug("Checking for workflows that should run")
workflows_with_interval = []
try:
- result = session.exec(
+ result = await session.exec(
select(Workflow)
.filter(Workflow.is_deleted == False)
.filter(Workflow.is_disabled == False)
@@ -252,7 +255,7 @@ def get_workflows_that_should_run():
if not last_execution:
try:
# try to get the lock
- workflow_execution_id = create_workflow_execution(
+ workflow_execution_id = await create_workflow_execution(
workflow.id, workflow.tenant_id, "scheduler"
)
# we succeed to get the lock on this execution number :)
@@ -274,7 +277,7 @@ def get_workflows_that_should_run():
):
try:
# try to get the lock with execution_number + 1
- workflow_execution_id = create_workflow_execution(
+ workflow_execution_id = await create_workflow_execution(
workflow.id,
workflow.tenant_id,
"scheduler",
@@ -294,10 +297,10 @@ def get_workflows_that_should_run():
# some other thread/instance has already started to work on it
except IntegrityError:
# we need to verify the locking is still valid and not timeouted
- session.rollback()
+ await session.rollback()
pass
# get the ongoing execution
- ongoing_execution = session.exec(
+ ongoing_execution = await session.exec(
select(WorkflowExecution)
.where(WorkflowExecution.workflow_id == workflow.id)
.where(
@@ -319,10 +322,10 @@ def get_workflows_that_should_run():
# if the ongoing execution runs more than 60 minutes, than its timeout
elif ongoing_execution.started + timedelta(minutes=60) <= current_time:
ongoing_execution.status = "timeout"
- session.commit()
+ await session.commit()
# re-create the execution and try to get the lock
try:
- workflow_execution_id = create_workflow_execution(
+ workflow_execution_id = await create_workflow_execution(
workflow.id,
workflow.tenant_id,
"scheduler",
@@ -479,25 +482,29 @@ def get_last_workflow_workflow_to_alert_executions(
return latest_workflow_to_alert_executions
-def get_last_workflow_execution_by_workflow_id(
+async def get_last_workflow_execution_by_workflow_id(
tenant_id: str, workflow_id: str, status: str = None
) -> Optional[WorkflowExecution]:
- with Session(engine) as session:
- query = (
- session.query(WorkflowExecution)
- .filter(WorkflowExecution.workflow_id == workflow_id)
- .filter(WorkflowExecution.tenant_id == tenant_id)
- .filter(WorkflowExecution.started >= datetime.now() - timedelta(days=1))
- .order_by(WorkflowExecution.started.desc())
+ async with AsyncSession(engine_async) as session:
+ await session.flush()
+ q = select(WorkflowExecution).filter(
+ WorkflowExecution.workflow_id == workflow_id
+ ).filter(WorkflowExecution.tenant_id == tenant_id).filter(
+ WorkflowExecution.started >= datetime.now() - timedelta(days=1)
+ ).order_by(
+ WorkflowExecution.started.desc()
)
- if status:
- query = query.filter(WorkflowExecution.status == status)
+ if status is not None:
+ q = q.filter(WorkflowExecution.status == status)
- workflow_execution = query.first()
+ workflow_execution = (
+ (await session.exec(q)).first()
+ )
return workflow_execution
+
def get_workflows_with_last_execution(tenant_id: str) -> List[dict]:
with Session(engine) as session:
latest_execution_cte = (
@@ -582,30 +589,32 @@ def get_all_workflows_yamls(tenant_id: str) -> List[str]:
return workflows
-def get_workflow(tenant_id: str, workflow_id: str) -> Workflow:
- with Session(engine) as session:
+async def get_workflow(tenant_id: str, workflow_id: str) -> Workflow:
+ async with AsyncSession(engine_async) as session:
# if the workflow id is uuid:
if validators.uuid(workflow_id):
- workflow = session.exec(
+ workflow = await session.exec(
select(Workflow)
.where(Workflow.tenant_id == tenant_id)
.where(Workflow.id == workflow_id)
.where(Workflow.is_deleted == False)
- ).first()
+ )
+ workflow = workflow.first()
else:
- workflow = session.exec(
+ workflow = await session.exec(
select(Workflow)
.where(Workflow.tenant_id == tenant_id)
.where(Workflow.name == workflow_id)
.where(Workflow.is_deleted == False)
- ).first()
+ )
+ workflow = workflow.first()
if not workflow:
return None
return workflow
-def get_raw_workflow(tenant_id: str, workflow_id: str) -> str:
- workflow = get_workflow(tenant_id, workflow_id)
+async def get_raw_workflow(tenant_id: str, workflow_id: str) -> str:
+ workflow = await get_workflow(tenant_id, workflow_id)
if not workflow:
return None
return workflow.workflow_raw
@@ -653,33 +662,42 @@ def get_consumer_providers() -> List[Provider]:
return providers
-def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, error):
- with Session(engine) as session:
- workflow_execution = session.exec(
+async def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, error):
+ async with AsyncSession(engine_async) as session:
+ random_number = random.randint(1, 2147483647 - 1) # max int
+
+ workflow_execution_old = (await session.exec(
select(WorkflowExecution).where(WorkflowExecution.id == execution_id)
- ).first()
- # some random number to avoid collisions
- if not workflow_execution:
+ )).first()
+
+ try:
+ execution_time = (datetime.utcnow() - workflow_execution_old.started).total_seconds()
+ except AttributeError:
+ execution_time = 0
+ logging.warning(f"Failed to calculate execution time for {execution_id}")
+
+ # Perform the update query
+ result = await session.exec(
+ update(WorkflowExecution)
+ .where(WorkflowExecution.id == execution_id)
+ .values(
+ is_running=random_number,
+ status=status,
+ error=error[:255] if error else None,
+ execution_time=execution_time
+ )
+ )
+
+ # Check if the update affected any rows
+ if result.rowcount == 0:
logger.warning(
- f"Failed to finish workflow execution {execution_id} for workflow {workflow_id}. Execution not found.",
- extra={
- "tenant_id": tenant_id,
- "workflow_id": workflow_id,
- "execution_id": execution_id,
- },
+ f"Failed to finish workflow execution {execution_id} for workflow {workflow_id}. Execution not found."
)
raise ValueError("Execution not found")
- workflow_execution.is_running = random.randint(1, 2147483647 - 1) # max int
- workflow_execution.status = status
- # TODO: we had a bug with the error field, it was too short so some customers may fail over it.
- # we need to fix it in the future, create a migration that increases the size of the error field
- # and then we can remove the [:511] from here
- workflow_execution.error = error[:511] if error else None
- workflow_execution.execution_time = (
- datetime.utcnow() - workflow_execution.started
- ).total_seconds()
- # TODO: logs
- session.commit()
+
+ # Commit the transaction
+ await session.commit()
+ await session.flush()
def get_workflow_executions(
@@ -787,14 +805,14 @@ def delete_workflow_by_provisioned_file(tenant_id, provisioned_file):
session.commit()
-def get_workflow_id(tenant_id, workflow_name):
- with Session(engine) as session:
- workflow = session.exec(
+async def get_workflow_id(tenant_id, workflow_name):
+ async with AsyncSession(engine_async) as session:
+ workflow = (await session.exec(
select(Workflow)
.where(Workflow.tenant_id == tenant_id)
.where(Workflow.name == workflow_name)
.where(Workflow.is_deleted == False)
- ).first()
+ )).first()
if workflow:
return workflow.id
@@ -1606,16 +1624,16 @@ def update_user_role(tenant_id, username, role):
return user
-def save_workflow_results(tenant_id, workflow_execution_id, workflow_results):
- with Session(engine) as session:
- workflow_execution = session.exec(
- select(WorkflowExecution)
+async def save_workflow_results(tenant_id, workflow_execution_id, workflow_results):
+ async with AsyncSession(engine_async) as session:
+ await session.exec(
+ update(WorkflowExecution)
.where(WorkflowExecution.tenant_id == tenant_id)
.where(WorkflowExecution.id == workflow_execution_id)
- ).one()
-
- workflow_execution.results = workflow_results
- session.commit()
+ .values(results=workflow_results)
+ )
+ await session.commit()
+ await session.flush()
def get_workflow_by_name(tenant_id, workflow_name):
@@ -1629,10 +1647,10 @@ def get_workflow_by_name(tenant_id, workflow_name):
return workflow
-
-def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id):
- with Session(engine) as session:
- previous_execution = session.exec(
+
+async def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id):
+ async with AsyncSession(engine_async) as session:
+ previous_execution = (await session.exec(
select(WorkflowExecution)
.where(WorkflowExecution.tenant_id == tenant_id)
.where(WorkflowExecution.workflow_id == workflow_id)
@@ -1642,13 +1660,14 @@ def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id):
) # no need to check more than 1 day ago
.order_by(WorkflowExecution.started.desc())
.limit(1)
- ).first()
+ )).first()
if previous_execution:
return previous_execution
else:
return None
+
def create_rule(
tenant_id,
name,
diff --git a/keep/api/core/db_utils.py b/keep/api/core/db_utils.py
index 97ffa3100..e4dd8c482 100644
--- a/keep/api/core/db_utils.py
+++ b/keep/api/core/db_utils.py
@@ -15,6 +15,7 @@
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.ddl import CreateColumn
from sqlalchemy.sql.functions import GenericFunction
+from sqlalchemy.ext.asyncio import create_async_engine
from sqlmodel import Session, create_engine
# This import is required to create the tables
@@ -124,31 +125,62 @@ def dumps(_json) -> str:
return json.dumps(_json, default=str)
-def create_db_engine():
+def asynchronize_connection_string(connection_string):
+ """
+    We want to make sure Keep keeps working after the update to async.
+    Some customers may have hardcoded sync drivers in their connection strings,
+    so we substitute sync drivers with async ones on the fly.
+ """
+ if type(connection_string) is not str:
+ return connection_string
+
+ if connection_string.startswith('sqlite:'):
+ connection_string = connection_string.replace('sqlite:', 'sqlite+aiosqlite:', 1)
+ logging.error(f"DB connection string updated to: {connection_string} to support async.")
+
+ if connection_string.startswith('postgresql+psycopg2:'):
+ connection_string = connection_string.replace('postgresql+psycopg2:', 'postgresql+psycopg:', 1)
+ logging.error(f"DB connection string updated to: {connection_string} to support async.")
+
+ if connection_string.startswith('mysql+pymysql:'):
+ connection_string = connection_string.replace('mysql+pymysql:', 'mysql+asyncmy:', 1)
+ logging.error(f"DB connection string updated to: {connection_string} to support async.")
+
+ return connection_string
+
+
+def create_db_engine(_async=False):
"""
Creates a database engine based on the environment variables.
"""
+ if _async:
+ creator_method = create_async_engine
+ db_connecton_string = asynchronize_connection_string(DB_CONNECTION_STRING)
+ else:
+ creator_method = create_engine
+ db_connecton_string = DB_CONNECTION_STRING
+
if RUNNING_IN_CLOUD_RUN and not KEEP_FORCE_CONNECTION_STRING:
- engine = create_engine(
- "mysql+pymysql://",
+ engine = creator_method(
+ "mysql+asyncmy://",
creator=__get_conn,
echo=DB_ECHO,
json_serializer=dumps,
pool_size=DB_POOL_SIZE,
max_overflow=DB_MAX_OVERFLOW,
)
- elif DB_CONNECTION_STRING == "impersonate":
- engine = create_engine(
- "mysql+pymysql://",
+ elif db_connecton_string == "impersonate":
+ engine = creator_method(
+ "mysql+asyncmy://",
creator=__get_conn_impersonate,
echo=DB_ECHO,
json_serializer=dumps,
)
- elif DB_CONNECTION_STRING:
+ elif db_connecton_string:
try:
logger.info(f"Creating a connection pool with size {DB_POOL_SIZE}")
- engine = create_engine(
- DB_CONNECTION_STRING,
+ engine = creator_method(
+ db_connecton_string,
pool_size=DB_POOL_SIZE,
max_overflow=DB_MAX_OVERFLOW,
json_serializer=dumps,
@@ -157,12 +189,12 @@ def create_db_engine():
)
# SQLite does not support pool_size
except TypeError:
- engine = create_engine(
- DB_CONNECTION_STRING, json_serializer=dumps, echo=DB_ECHO
+ engine = creator_method(
+ db_connecton_string, json_serializer=dumps, echo=DB_ECHO
)
else:
- engine = create_engine(
- "sqlite:///./keep.db",
+ engine = creator_method(
+ "sqlite+aiosqlite:///./keep.db",
connect_args={"check_same_thread": False},
echo=DB_ECHO,
json_serializer=dumps,
diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py
index 7a5912996..b9e8ce6da 100644
--- a/keep/api/routes/workflows.py
+++ b/keep/api/routes/workflows.py
@@ -168,7 +168,7 @@ def export_workflows(
"/{workflow_id}/run",
description="Run a workflow",
)
-def run_workflow(
+async def run_workflow(
workflow_id: str,
event_type: Optional[str] = Query(None),
event_id: Optional[str] = Query(None),
@@ -231,7 +231,7 @@ def run_workflow(
detail="Invalid event format",
)
- workflow_execution_id = workflowmanager.scheduler.handle_manual_event_workflow(
+ workflow_execution_id = await workflowmanager.scheduler.handle_manual_event_workflow(
workflow_id, tenant_id, created_by, event
)
except Exception as e:
@@ -274,7 +274,7 @@ async def run_workflow_from_definition(
workflowstore = WorkflowStore()
workflowmanager = WorkflowManager.get_instance()
try:
- workflow = workflowstore.get_workflow_from_dict(
+ workflow = await workflowstore.get_workflow_from_dict(
tenant_id=tenant_id, workflow=workflow
)
except Exception as e:
@@ -288,7 +288,7 @@ async def run_workflow_from_definition(
)
try:
- workflow_execution = workflowmanager.scheduler.handle_workflow_test(
+ workflow_execution = await workflowmanager.scheduler.handle_workflow_test(
workflow, tenant_id, created_by
)
except Exception as e:
@@ -497,7 +497,7 @@ async def update_workflow_by_id(
"""
tenant_id = authenticated_entity.tenant_id
logger.info(f"Updating workflow {workflow_id}", extra={"tenant_id": tenant_id})
- workflow_from_db = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
+ workflow_from_db = await get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
if not workflow_from_db:
logger.warning(
f"Tenant tried to update workflow {workflow_id} that does not exist",
@@ -528,7 +528,7 @@ async def update_workflow_by_id(
@router.get("/{workflow_id}/raw", description="Get workflow executions by ID")
-def get_raw_workflow_by_id(
+async def get_raw_workflow_by_id(
workflow_id: str,
authenticated_entity: AuthenticatedEntity = Depends(
IdentityManagerFactory.get_auth_verifier(["read:workflows"])
@@ -539,7 +539,7 @@ def get_raw_workflow_by_id(
return JSONResponse(
status_code=200,
content={
- "workflow_raw": workflowstore.get_raw_workflow(
+ "workflow_raw": await workflowstore.get_raw_workflow(
tenant_id=tenant_id, workflow_id=workflow_id
)
},
@@ -547,7 +547,7 @@ def get_raw_workflow_by_id(
@router.get("/{workflow_id}", description="Get workflow by ID")
-def get_workflow_by_id(
+async def get_workflow_by_id(
workflow_id: str,
authenticated_entity: AuthenticatedEntity = Depends(
IdentityManagerFactory.get_auth_verifier(["read:workflows"])
@@ -555,7 +555,7 @@ def get_workflow_by_id(
):
tenant_id = authenticated_entity.tenant_id
# get all workflow
- workflow = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
+ workflow = await get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
if not workflow:
logger.warning(
@@ -590,7 +590,7 @@ def get_workflow_by_id(
@router.get("/{workflow_id}/runs", description="Get workflow executions by ID")
-def get_workflow_runs_by_id(
+async def get_workflow_runs_by_id(
workflow_id: str,
tab: int = 1,
limit: int = 25,
@@ -603,7 +603,7 @@ def get_workflow_runs_by_id(
),
) -> WorkflowExecutionsPaginatedResultsDto:
tenant_id = authenticated_entity.tenant_id
- workflow = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
+ workflow = await get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
installed_providers = get_installed_providers(tenant_id)
installed_providers_by_type = {}
for installed_provider in installed_providers:
diff --git a/keep/api/tasks/process_event_task.py b/keep/api/tasks/process_event_task.py
index 4b9b7fcb7..4a73f77b3 100644
--- a/keep/api/tasks/process_event_task.py
+++ b/keep/api/tasks/process_event_task.py
@@ -1,4 +1,5 @@
# builtins
+import asyncio
import copy
import datetime
import json
@@ -425,7 +426,8 @@ def __handle_formatted_events(
workflow_manager = WorkflowManager.get_instance()
# insert the events to the workflow manager process queue
logger.info("Adding events to the workflow manager queue")
- workflow_manager.insert_events(tenant_id, enriched_formatted_events)
+ loop = asyncio.get_event_loop()
+ loop.run(workflow_manager.insert_events(tenant_id, enriched_formatted_events))
logger.info("Added events to the workflow manager queue")
except Exception:
logger.exception(
@@ -452,7 +454,7 @@ def __handle_formatted_events(
# if new grouped incidents were created, we need to push them to the client
# if incidents:
# logger.info("Adding group alerts to the workflow manager queue")
- # workflow_manager.insert_events(tenant_id, grouped_alerts)
+ # asyncio.run(workflow_manager.insert_events(tenant_id, grouped_alerts))
# logger.info("Added group alerts to the workflow manager queue")
except Exception:
logger.exception(
diff --git a/keep/contextmanager/contextmanager.py b/keep/contextmanager/contextmanager.py
index 3283ea506..a6cf1c337 100644
--- a/keep/contextmanager/contextmanager.py
+++ b/keep/contextmanager/contextmanager.py
@@ -1,4 +1,5 @@
# TODO - refactor context manager to support multitenancy in a more robust way
+import asyncio
import logging
import click
@@ -54,11 +55,9 @@ def __init__(
"last_workflow_results" in workflow_str
)
if last_workflow_results_in_workflow:
- last_workflow_execution = (
- get_last_workflow_execution_by_workflow_id(
- tenant_id, workflow_id, status="success"
- )
- )
+ last_workflow_execution = asyncio.run(get_last_workflow_execution_by_workflow_id(
+ tenant_id, workflow_id, status="success"
+ ))
if last_workflow_execution is not None:
self.last_workflow_execution_results = (
last_workflow_execution.results
@@ -259,8 +258,8 @@ def set_step_vars(self, step_id, _vars):
self.current_step_vars = _vars
self.steps_context[step_id]["vars"] = _vars
- def get_last_workflow_run(self, workflow_id):
- return get_last_workflow_execution_by_workflow_id(self.tenant_id, workflow_id)
+ async def get_last_workflow_run(self, workflow_id):
+ return await get_last_workflow_execution_by_workflow_id(self.tenant_id, workflow_id)
def dump(self):
self.logger.info("Dumping logs to db")
diff --git a/keep/parser/parser.py b/keep/parser/parser.py
index 72c30c7ad..c5164c83c 100644
--- a/keep/parser/parser.py
+++ b/keep/parser/parser.py
@@ -20,7 +20,7 @@ class Parser:
def __init__(self):
self.logger = logging.getLogger(__name__)
- def _get_workflow_id(self, tenant_id, workflow: dict) -> str:
+ async def _get_workflow_id(self, tenant_id, workflow: dict) -> str:
"""Support both CLI and API workflows
Args:
@@ -39,7 +39,7 @@ def _get_workflow_id(self, tenant_id, workflow: dict) -> str:
raise ValueError("Workflow dict must have an id")
# get the workflow id from the database
- workflow_id = get_workflow_id(tenant_id, workflow_name)
+ workflow_id = await get_workflow_id(tenant_id, workflow_name)
# if the workflow id is not found, it means that the workflow is not stored in the db
# for example when running from CLI
# so for backward compatibility, we will use the workflow name as the id
@@ -48,7 +48,7 @@ def _get_workflow_id(self, tenant_id, workflow: dict) -> str:
workflow_id = workflow_name
return workflow_id
- def parse(
+ async def parse(
self,
tenant_id,
parsed_workflow_yaml: dict,
@@ -72,7 +72,7 @@ def parse(
"workflows"
) or parsed_workflow_yaml.get("alerts")
workflows = [
- self._parse_workflow(
+ await self._parse_workflow(
tenant_id,
workflow,
providers_file,
@@ -87,7 +87,7 @@ def parse(
raw_workflow = parsed_workflow_yaml.get(
"workflow"
) or parsed_workflow_yaml.get("alert")
- workflow = self._parse_workflow(
+ workflow = await self._parse_workflow(
tenant_id,
raw_workflow,
providers_file,
@@ -98,7 +98,7 @@ def parse(
workflows = [workflow]
# else, if it stored in the db, it stored without the "workflow" key
else:
- workflow = self._parse_workflow(
+ workflow = await self._parse_workflow(
tenant_id,
parsed_workflow_yaml,
providers_file,
@@ -126,7 +126,7 @@ def _get_workflow_provider_types_from_steps_and_actions(
)
return provider_types
- def _parse_workflow(
+ async def _parse_workflow(
self,
tenant_id,
workflow: dict,
@@ -136,7 +136,7 @@ def _parse_workflow(
workflow_actions: dict = None,
) -> Workflow:
self.logger.debug("Parsing workflow")
- workflow_id = self._get_workflow_id(tenant_id, workflow)
+ workflow_id = await self._get_workflow_id(tenant_id, workflow)
context_manager = ContextManager(
tenant_id=tenant_id, workflow_id=workflow_id, workflow=workflow
)
diff --git a/keep/providers/base/base_provider.py b/keep/providers/base/base_provider.py
index f92588d2e..f4de5ee17 100644
--- a/keep/providers/base/base_provider.py
+++ b/keep/providers/base/base_provider.py
@@ -3,9 +3,12 @@
"""
import abc
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
import copy
import datetime
import hashlib
+import inspect
import itertools
import json
import logging
@@ -67,6 +70,7 @@ class BaseProvider(metaclass=abc.ABCMeta):
Literal["alert", "ticketing", "messaging", "data", "queue", "topology"]
] = []
WEBHOOK_INSTALLATION_REQUIRED = False # webhook installation is required for this provider, making it required in the UI
+ thread_executor_for_sync_methods = ThreadPoolExecutor()
def __init__(
self,
@@ -157,15 +161,24 @@ def validate_scopes(self) -> dict[str, bool | str]:
"""
return {}
- def notify(self, **kwargs):
+ async def notify(self, **kwargs):
"""
Output alert message.
Args:
**kwargs (dict): The provider context (with statement)
"""
- # trigger the provider
- results = self._notify(**kwargs)
+ # Trigger the provider, allow async and non-async functions
+ if inspect.iscoroutinefunction(self._notify):
+ results = await self._notify(**kwargs)
+ else:
+ loop = asyncio.get_running_loop()
+ # Running in a thread executor to avoid blocking the event loop
+ results = await loop.run_in_executor(
+ self.__class__.thread_executor_for_sync_methods,
+ lambda: self._notify(**kwargs)
+ )
+ self.logger.warning(f"Provider {self.provider_type} notify method is not async. This may cause performance issues.")
self.results.append(results)
# if the alert should be enriched, enrich it
enrich_alert = kwargs.get("enrich_alert", [])
@@ -311,9 +324,18 @@ def _query(self, **kwargs: dict):
"""
raise NotImplementedError("query() method not implemented")
- def query(self, **kwargs: dict):
- # just run the query
- results = self._query(**kwargs)
+ async def query(self, **kwargs: dict):
+ # Run the query, it may be sync or async
+ if inspect.iscoroutinefunction(self._query):
+ results = await self._query(**kwargs)
+ else:
+ loop = asyncio.get_running_loop()
+ # Running in a thread executor to avoid blocking the event loop
+ results = await loop.run_in_executor(
+ self.__class__.thread_executor_for_sync_methods,
+ lambda: self._query(**kwargs)
+ )
+ self.logger.warning(f"Provider {self.provider_type} _query method is not async. This may cause performance issues")
self.results.append(results)
# now add the type of the results to the global context
if results and isinstance(results, list):
diff --git a/keep/providers/clickhouse_http_provider/__init__.py b/keep/providers/clickhouse_http_provider/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py
new file mode 100644
index 000000000..f38805986
--- /dev/null
+++ b/keep/providers/clickhouse_http_provider/clickhouse_http_provider.py
@@ -0,0 +1,153 @@
+"""
+Clickhouse is a class that provides a way to read data from Clickhouse.
+"""
+
+import asyncio
+import pydantic
+import dataclasses
+
+import clickhouse_connect
+
+from keep.contextmanager.contextmanager import ContextManager
+from keep.providers.base.base_provider import BaseProvider
+from keep.providers.models.provider_config import ProviderConfig, ProviderScope
+from keep.validation.fields import NoSchemeUrl, UrlPort
+
+
+@pydantic.dataclasses.dataclass
+class ClickhouseHttpProviderAuthConfig:
+ username: str = dataclasses.field(
+ metadata={"required": True, "description": "Clickhouse username"}
+ )
+ password: str = dataclasses.field(
+ metadata={
+ "required": True,
+ "description": "Clickhouse password",
+ "sensitive": True,
+ }
+ )
+ host: NoSchemeUrl = dataclasses.field(
+ metadata={
+ "required": True,
+ "description": "Clickhouse hostname",
+ "validation": "no_scheme_url",
+ }
+ )
+ port: UrlPort = dataclasses.field(
+ metadata={
+ "required": True,
+ "description": "Clickhouse port",
+ "validation": "port",
+ }
+ )
+ database: str | None = dataclasses.field(
+ metadata={"required": False, "description": "Clickhouse database name"},
+ default=None,
+ )
+
+
+class ClickhouseHttpProvider(BaseProvider):
+ """Enrich alerts with data from Clickhouse."""
+
+ PROVIDER_DISPLAY_NAME = "Clickhouse HTTP"
+ PROVIDER_CATEGORY = ["Database"]
+
+ PROVIDER_SCOPES = [
+ ProviderScope(
+ name="connect_to_server",
+ description="The user can connect to the server",
+ mandatory=True,
+ alias="Connect to the server",
+ )
+ ]
+ SHARED_CLIENT = {} # Caching the client to avoid creating a new one for each query
+
+ def __init__(
+ self, context_manager: ContextManager, provider_id: str, config: ProviderConfig
+ ):
+ super().__init__(context_manager, provider_id, config)
+ self.client = None
+
+ def dispose(self):
+ pass
+
+ def validate_scopes(self):
+ """
+ Validates that the user has the required scopes to use the provider.
+ """
+ try:
+ client = asyncio.run(self.__generate_client())
+
+ tables = asyncio.run(client.query("SHOW TABLES"))
+ self.logger.info(f"Tables: {tables}")
+
+ scopes = {
+ "connect_to_server": True,
+ }
+ except Exception as e:
+ self.logger.exception("Error validating scopes")
+ scopes = {
+ "connect_to_server": str(e),
+ }
+ return scopes
+
+ async def __generate_client(self):
+ """
+ Generates a Clickhouse client.
+ """
+ if self.context_manager.tenant_id + self.provider_id in ClickhouseHttpProvider.SHARED_CLIENT:
+ return ClickhouseHttpProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id]
+
+ user = self.authentication_config.username
+ password = self.authentication_config.password
+ host = self.authentication_config.host
+ database = self.authentication_config.database
+ port = self.authentication_config.port
+
+ client = await clickhouse_connect.get_async_client(
+ host=host,
+ port=port,
+ user=user,
+ password=password,
+ database=database,
+ )
+ ClickhouseHttpProvider.SHARED_CLIENT[self.context_manager.tenant_id + self.provider_id] = client
+
+ return client
+
+ def validate_config(self):
+ """
+ Validates required configuration for Clickhouse's provider.
+ """
+ self.authentication_config = ClickhouseHttpProviderAuthConfig(
+ **self.config.authentication
+ )
+ return True
+
+ async def _query(self, query="", single_row=False, **kwargs: dict) -> list | tuple:
+ """
+ Executes a query against the Clickhouse database.
+ Returns:
+ list | tuple: list of results or single result if single_row is True
+ """
+ return await self._notify(query=query, single_row=single_row, **kwargs)
+
+ async def _notify(self, query="", single_row=False, **kwargs: dict) -> list | tuple:
+ """
+ Executes a query against the Clickhouse database.
+ Returns:
+ list | tuple: list of results or single result if single_row is True
+ """
+ # return {'dt': datetime.datetime(2024, 12, 4, 6, 37, 22), 'customer_id': 99999999, 'total_spent': 19.850000381469727}
+ client = await self.__generate_client()
+ results = await client.query(query, **kwargs)
+ rows = results.result_rows
+ columns = results.column_names
+
+    # Make the results more human-readable and compatible with the format we had with the sync library before.
+ results = [dict(zip(columns, row)) for row in rows]
+
+ if single_row:
+ return results[0]
+
+ return results
\ No newline at end of file
diff --git a/keep/step/step.py b/keep/step/step.py
index 7f51350d7..004d5ebc7 100644
--- a/keep/step/step.py
+++ b/keep/step/step.py
@@ -56,12 +56,12 @@ def name(self):
def continue_to_next_step(self):
return self.__continue_to_next_step
- def run(self):
+ async def run(self):
try:
if self.config.get("foreach"):
- did_action_run = self._run_foreach()
+ did_action_run = await self._run_foreach()
else:
- did_action_run = self._run_single()
+ did_action_run = await self._run_single()
return did_action_run
except Exception as e:
self.logger.error(
@@ -106,7 +106,7 @@ def _get_foreach_items(self) -> list | list[list]:
return []
return len(foreach_items) == 1 and foreach_items[0] or zip(*foreach_items)
- def _run_foreach(self):
+ async def _run_foreach(self):
"""Evaluate the action for each item, when using the `foreach` attribute (see foreach.md)"""
# the item holds the value we are going to iterate over
items = self._get_foreach_items()
@@ -115,7 +115,7 @@ def _run_foreach(self):
for item in items:
self.context_manager.set_for_each_context(item)
try:
- did_action_run = self._run_single()
+ did_action_run = await self._run_single()
except Exception as e:
self.logger.error(f"Failed to run action with error {e}")
continue
@@ -125,7 +125,7 @@ def _run_foreach(self):
any_action_run = True
return any_action_run
- def _run_single(self):
+ async def _run_single(self):
# Initialize all conditions
conditions = []
self.context_manager.set_step_vars(self.step_id, _vars=self.vars)
@@ -257,11 +257,11 @@ def _run_single(self):
)
try:
if self.step_type == StepType.STEP:
- step_output = self.provider.query(
+ step_output = await self.provider.query(
**rendered_providers_parameters
)
else:
- step_output = self.provider.notify(
+ step_output = await self.provider.notify(
**rendered_providers_parameters
)
# exiting the loop as step/action execution was successful
diff --git a/keep/workflowmanager/workflow.py b/keep/workflowmanager/workflow.py
index efac5d4c7..db03cc21c 100644
--- a/keep/workflowmanager/workflow.py
+++ b/keep/workflowmanager/workflow.py
@@ -53,12 +53,12 @@ def __init__(
self.io_nandler = IOHandler(context_manager)
self.logger = self.context_manager.get_logger()
- def run_steps(self):
+ async def run_steps(self):
self.logger.debug(f"Running steps for workflow {self.workflow_id}")
for step in self.workflow_steps:
try:
self.logger.info("Running step %s", step.step_id)
- step_ran = step.run()
+ step_ran = await step.run()
if step_ran:
self.logger.info("Step %s ran successfully", step.step_id)
# if the step ran + the step configured to stop the workflow:
@@ -73,11 +73,11 @@ def run_steps(self):
raise
self.logger.debug(f"Steps for workflow {self.workflow_id} ran successfully")
- def run_action(self, action: Step):
+ async def run_action(self, action: Step):
self.logger.info("Running action %s", action.name)
try:
action_stop = False
- action_ran = action.run()
+ action_ran = await action.run()
action_error = None
if action_ran:
self.logger.info("Action %s ran successfully", action.name)
@@ -93,12 +93,12 @@ def run_action(self, action: Step):
action_error = f"Failed to run action {action.name}: {str(e)}"
return action_ran, action_error, action_stop
- def run_actions(self):
+ async def run_actions(self):
self.logger.debug("Running actions")
actions_firing = []
actions_errors = []
for action in self.workflow_actions:
- action_status, action_error, action_stop = self.run_action(action)
+ action_status, action_error, action_stop = await self.run_action(action)
if action_error:
actions_firing.append(action_status)
actions_errors.append(action_error)
@@ -109,14 +109,14 @@ def run_actions(self):
self.logger.debug("Actions ran")
return actions_firing, actions_errors
- def run(self, workflow_execution_id):
+ async def run(self, workflow_execution_id):
if self.workflow_disabled:
self.logger.info(f"Skipping disabled workflow {self.workflow_id}")
return
self.logger.info(f"Running workflow {self.workflow_id}")
self.context_manager.set_execution_context(workflow_execution_id)
try:
- self.run_steps()
+ await self.run_steps()
except StepError as e:
self.logger.error(
f"Workflow {self.workflow_id} failed: {e}",
@@ -125,6 +125,6 @@ def run(self, workflow_execution_id):
},
)
raise
- actions_firing, actions_errors = self.run_actions()
+ actions_firing, actions_errors = await self.run_actions()
self.logger.info(f"Finish to run workflow {self.workflow_id}")
return actions_errors
diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py
index a8f7d0a07..2bedf0045 100644
--- a/keep/workflowmanager/workflowmanager.py
+++ b/keep/workflowmanager/workflowmanager.py
@@ -3,6 +3,7 @@
import re
import typing
import uuid
+import asyncio
from keep.api.core.config import config
from keep.api.core.db import (
@@ -44,18 +45,16 @@ async def start(self):
if self.started:
self.logger.info("Workflow manager already started")
return
-
await self.scheduler.start()
self.started = True
- def stop(self):
+ async def stop(self):
"""Stops the workflow manager"""
if not self.started:
return
+
self.scheduler.stop()
self.started = False
- # Clear the scheduler reference
- self.scheduler = None
def _apply_filter(self, filter_val, value):
# if it's a regex, apply it
@@ -76,11 +75,11 @@ def _apply_filter(self, filter_val, value):
return value == str(filter_val)
return value == filter_val
- def _get_workflow_from_store(self, tenant_id, workflow_model):
+ async def _get_workflow_from_store(self, tenant_id, workflow_model):
try:
# get the actual workflow that can be triggered
self.logger.info("Getting workflow from store")
- workflow = self.workflow_store.get_workflow(tenant_id, workflow_model.id)
+ workflow = await self.workflow_store.get_workflow(tenant_id, workflow_model.id)
self.logger.info("Got workflow from store")
return workflow
except ProviderConfigurationException:
@@ -100,7 +99,7 @@ def _get_workflow_from_store(self, tenant_id, workflow_model):
},
)
- def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str):
+ async def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str):
all_workflow_models = self.workflow_store.get_all_workflows(tenant_id)
self.logger.info(
"Got all workflows",
@@ -116,7 +115,9 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str):
f"tenant_id={workflow_model.tenant_id} - Workflow is disabled."
)
continue
- workflow = self._get_workflow_from_store(tenant_id, workflow_model)
+
+ workflow = await self._get_workflow_from_store(tenant_id, workflow_model)
+
if workflow is None:
continue
@@ -149,7 +150,7 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str):
)
self.logger.info("Workflow added to run")
- def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]):
+ async def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]):
for event in events:
self.logger.info("Getting all workflows")
all_workflow_models = self.workflow_store.get_all_workflows(tenant_id)
@@ -167,7 +168,7 @@ def insert_events(self, tenant_id, events: typing.List[AlertDto | IncidentDto]):
f"tenant_id={workflow_model.tenant_id} - Workflow is disabled."
)
continue
- workflow = self._get_workflow_from_store(tenant_id, workflow_model)
+ workflow = await self._get_workflow_from_store(tenant_id, workflow_model)
if workflow is None:
continue
@@ -360,7 +361,7 @@ def _check_premium_providers(self, workflow: Workflow):
f"Provider {provider} is a premium provider. You can self-host or contact us to get access to it."
)
- def _run_workflow_on_failure(
+ async def _run_workflow_on_failure(
self, workflow: Workflow, workflow_execution_id: str, error_message: str
):
"""
@@ -385,7 +386,7 @@ def _run_workflow_on_failure(
f"Workflow {workflow.workflow_id} failed with errors: {error_message}"
)
workflow.on_failure.provider_parameters = {"message": message}
- workflow.on_failure.run()
+ await workflow.on_failure.run()
self.logger.info(
"Ran on_failure action for workflow",
extra={
@@ -404,8 +405,9 @@ def _run_workflow_on_failure(
},
)
+
@timing_histogram(workflow_execution_duration)
- def _run_workflow(
+ async def _run_workflow(
self, workflow: Workflow, workflow_execution_id: str, test_run=False
):
self.logger.debug(f"Running workflow {workflow.workflow_id}")
@@ -413,9 +415,9 @@ def _run_workflow(
results = {}
try:
self._check_premium_providers(workflow)
- errors = workflow.run(workflow_execution_id)
+ errors = await workflow.run(workflow_execution_id)
if errors:
- self._run_workflow_on_failure(
+ await self._run_workflow_on_failure(
workflow, workflow_execution_id, ", ".join(errors)
)
except Exception as e:
@@ -423,7 +425,7 @@ def _run_workflow(
f"Error running workflow {workflow.workflow_id}",
extra={"exception": e, "workflow_execution_id": workflow_execution_id},
)
- self._run_workflow_on_failure(workflow, workflow_execution_id, str(e))
+ await self._run_workflow_on_failure(workflow, workflow_execution_id, str(e))
raise
finally:
if not test_run:
@@ -437,7 +439,7 @@ def _run_workflow(
if test_run:
results = self._get_workflow_results(workflow)
else:
- self._save_workflow_results(workflow, workflow_execution_id)
+ await self._save_workflow_results(workflow, workflow_execution_id)
return [errors, results]
@@ -462,7 +464,7 @@ def _get_workflow_results(workflow: Workflow):
)
return workflow_results
- def _save_workflow_results(self, workflow: Workflow, workflow_execution_id: str):
+ async def _save_workflow_results(self, workflow: Workflow, workflow_execution_id: str):
"""
Save the results of the workflow to the DB.
@@ -479,7 +481,7 @@ def _save_workflow_results(self, workflow: Workflow, workflow_execution_id: str)
{step.name: step.provider.results for step in workflow.workflow_steps}
)
try:
- save_workflow_results(
+ await save_workflow_results(
tenant_id=workflow.context_manager.tenant_id,
workflow_execution_id=workflow_execution_id,
workflow_results=workflow_results,
@@ -497,9 +499,9 @@ def _run_workflows_from_cli(self, workflows: typing.List[Workflow]):
for workflow in workflows:
try:
random_workflow_id = str(uuid.uuid4())
- errors, _ = self._run_workflow(
+ errors, _ = asyncio.run(self._run_workflow(
workflow, workflow_execution_id=random_workflow_id
- )
+ ))
workflows_errors.append(errors)
except Exception as e:
self.logger.error(
diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py
index 18ecca9cf..caf705898 100644
--- a/keep/workflowmanager/workflowscheduler.py
+++ b/keep/workflowmanager/workflowscheduler.py
@@ -1,3 +1,4 @@
+import asyncio
import enum
import hashlib
import logging
@@ -82,11 +83,12 @@ def __init__(self, workflow_manager):
self.interval_enabled = (
config("WORKFLOWS_INTERVAL_ENABLED", default="true") == "true"
)
+ self.task = None
self.executor = ThreadPoolExecutor(
max_workers=self.MAX_WORKERS,
thread_name_prefix="WorkflowScheduler",
)
- self.scheduler_future = None
+ self.run_future = None
self.futures = set()
# Initialize metrics for queue size
self._update_queue_metrics()
@@ -100,14 +102,14 @@ def _update_queue_metrics(self):
len(self.workflows_to_run)
)
- async def start(self):
+ async def start(self, loop=None):
self.logger.info("Starting workflows scheduler")
# Shahar: fix for a bug in unit tests
self._stop = False
- self.scheduler_future = self.executor.submit(self._start)
+ self.run_future = asyncio.create_task(self._run())
self.logger.info("Workflows scheduler started")
- def _handle_interval_workflows(self):
+ async def _handle_interval_workflows(self):
workflows = []
if not self.interval_enabled:
@@ -116,7 +118,7 @@ def _handle_interval_workflows(self):
try:
# get all workflows that should run due to interval
- workflows = get_workflows_that_should_run()
+ workflows = await get_workflows_that_should_run()
except Exception:
self.logger.exception("Error getting workflows that should run")
pass
@@ -126,6 +128,6 @@
workflow_id = workflow.get("workflow_id")
try:
- workflow_obj = self.workflow_store.get_workflow(tenant_id, workflow_id)
+ workflow_obj = await self.workflow_store.get_workflow(tenant_id, workflow_id)
except ProviderConfigurationException:
self.logger.exception(
@@ -136,7 +139,7 @@ def _handle_interval_workflows(self):
"tenant_id": tenant_id,
},
)
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -146,7 +149,7 @@ def _handle_interval_workflows(self):
continue
except Exception as e:
self.logger.error(f"Error getting workflow: {e}")
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -155,17 +158,18 @@ def _handle_interval_workflows(self):
)
continue
- future = self.executor.submit(
- self._run_workflow,
- tenant_id,
- workflow_id,
- workflow_obj,
- workflow_execution_id,
+ future = asyncio.create_task(
+ self._run_workflow(
+ tenant_id,
+ workflow_id,
+ workflow_obj,
+ workflow_execution_id,
+ )
)
self.futures.add(future)
future.add_done_callback(lambda f: self.futures.remove(f))
- def _run_workflow(
+ async def _run_workflow(
self,
tenant_id,
workflow_id,
@@ -175,7 +179,7 @@ def _run_workflow(
):
if READ_ONLY_MODE:
self.logger.debug("Sleeping for 3 seconds in favor of read only mode")
- time.sleep(3)
+ await asyncio.sleep(3)
self.logger.info(f"Running workflow {workflow.workflow_id}...")
@@ -205,7 +209,7 @@ def _run_workflow(
else:
workflow.context_manager.set_incident_context(event_context)
- errors, _ = self.workflow_manager._run_workflow(
+ errors, _ = await self.workflow_manager._run_workflow(
workflow, workflow_execution_id
)
except Exception as e:
@@ -221,7 +225,7 @@ def _run_workflow(
).inc()
self.logger.exception(f"Failed to run workflow {workflow.workflow_id}...")
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -236,7 +240,7 @@ def _run_workflow(
if any(errors):
self.logger.info(msg=f"Workflow {workflow.workflow_id} ran with errors")
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -244,7 +248,7 @@ def _run_workflow(
error="\n".join(str(e) for e in errors),
)
else:
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -253,8 +257,11 @@ def _run_workflow(
)
self.logger.info(f"Workflow {workflow.workflow_id} ran")
+ return True
+
def handle_workflow_test(self, workflow, tenant_id, triggered_by_user):
+
workflow_execution_id = self._get_unique_execution_number()
self.logger.info(
@@ -318,7 +325,7 @@ def run_workflow_wrapper(
"results": results,
}
- def handle_manual_event_workflow(
+ async def handle_manual_event_workflow(
self, workflow_id, tenant_id, triggered_by_user, event: [AlertDto | IncidentDto]
):
self.logger.info(f"Running manual event workflow {workflow_id}...")
@@ -335,7 +342,7 @@ def handle_manual_event_workflow(
event_type = "alert"
fingerprint = event.fingerprint
- workflow_execution_id = create_workflow_execution(
+ workflow_execution_id = await create_workflow_execution(
workflow_id=workflow_id,
tenant_id=tenant_id,
triggered_by=f"manually by {triggered_by_user}",
@@ -391,7 +398,7 @@ def _get_unique_execution_number(self, fingerprint=None):
WorkflowScheduler.MAX_SIZE_SIGNED_INT + 1
)
- def _handle_event_workflows(self):
+ async def _handle_event_workflows(self):
# TODO - event workflows should be in DB too, to avoid any state problems.
# take out all items from the workflows to run and run them, also, clean the self.workflows_to_run list
@@ -419,13 +426,13 @@ def _handle_event_workflows(self):
if not workflow:
self.logger.info("Loading workflow")
try:
- workflow = self.workflow_store.get_workflow(
+ workflow = await self.workflow_store.get_workflow(
workflow_id=workflow_id, tenant_id=tenant_id
)
# In case the provider are not configured properly
except ProviderConfigurationException as e:
self.logger.error(f"Error getting workflow: {e}")
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -435,7 +442,7 @@ def _handle_event_workflows(self):
continue
except Exception as e:
self.logger.error(f"Error getting workflow: {e}")
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -479,7 +486,7 @@ def _handle_event_workflows(self):
workflow_execution_number = self._get_unique_execution_number(
fingerprint
)
- workflow_execution_id = create_workflow_execution(
+ workflow_execution_id = await create_workflow_execution(
workflow_id=workflow_id,
tenant_id=tenant_id,
triggered_by=triggered_by,
@@ -526,7 +533,7 @@ def _handle_event_workflows(self):
"tenant_id": tenant_id,
},
)
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -585,7 +592,7 @@ def _handle_event_workflows(self):
"tenant_id": tenant_id,
},
)
- self._finish_workflow_execution(
+ await self._finish_workflow_execution(
tenant_id=tenant_id,
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
@@ -593,15 +600,14 @@ def _handle_event_workflows(self):
error=f"Error getting alert by id: {e}",
)
continue
- # Last, run the workflow
- future = self.executor.submit(
- self._run_workflow,
+ # Last, run the workflow in the current event loop.
+ future = asyncio.create_task(self._run_workflow(
tenant_id,
workflow_id,
workflow,
workflow_execution_id,
event,
- )
+ ))
self.futures.add(future)
future.add_done_callback(lambda f: self.futures.remove(f))
@@ -610,7 +616,8 @@ def _handle_event_workflows(self):
extra={"current_number_of_workflows": len(self.futures)},
)
- def _start(self):
+
+ async def _run(self):
self.logger.info("Starting workflows scheduler")
while not self._stop:
# get all workflows that should run now
@@ -619,15 +626,15 @@ def _start(self):
extra={"current_number_of_workflows": len(self.futures)},
)
try:
- self._handle_interval_workflows()
- self._handle_event_workflows()
+ await self._handle_interval_workflows()
+ await self._handle_event_workflows()
except Exception:
# This is the "mainloop" of the scheduler, we don't want to crash it
# But any exception here should be investigated
self.logger.exception("Error getting workflows that should run")
pass
self.logger.debug("Sleeping until next iteration")
- time.sleep(1)
+ await asyncio.sleep(1)
self.logger.info("Workflows scheduler stopped")
def stop(self):
@@ -635,9 +642,9 @@ def stop(self):
self._stop = True
# Wait for scheduler to stop first
- if self.scheduler_future:
+ if self.run_future:
try:
- self.scheduler_future.result(
- timeout=5
- ) # Add timeout to prevent hanging
+ # asyncio.Task.result() accepts no timeout and raises
+ # InvalidStateError while pending; cancel the loop task instead.
+ self.run_future.cancel()
except Exception:
@@ -658,7 +665,6 @@ def stop(self):
try:
self.logger.info("Shutting down executor")
self.executor.shutdown(wait=True, cancel_futures=True)
- self.executor = None
self.logger.info("Executor shut down")
except Exception:
self.logger.exception("Error shutting down executor")
@@ -666,7 +672,8 @@ def stop(self):
self.futures.clear()
self.logger.info("Scheduled workflows stopped")
- def _finish_workflow_execution(
+
+ async def _finish_workflow_execution(
self,
tenant_id: str,
workflow_id: str,
@@ -675,7 +682,7 @@ def _finish_workflow_execution(
error=None,
):
# mark the workflow execution as finished in the db
- finish_workflow_execution_db(
+ await finish_workflow_execution_db(
tenant_id=tenant_id,
workflow_id=workflow_id,
execution_id=workflow_execution_id,
@@ -685,7 +692,7 @@ def _finish_workflow_execution(
if KEEP_EMAILS_ENABLED:
# get the previous workflow execution id
- previous_execution = get_previous_execution_id(
+ previous_execution = await get_previous_execution_id(
tenant_id, workflow_id, workflow_execution_id
)
# if error, send an email
@@ -694,7 +701,7 @@ def _finish_workflow_execution(
is None # this means this is the first execution, for example
or previous_execution.status != WorkflowStatus.ERROR.value
):
- workflow = get_workflow_db(tenant_id=tenant_id, workflow_id=workflow_id)
+ workflow = await get_workflow_db(tenant_id=tenant_id, workflow_id=workflow_id)
try:
keep_platform_url = config(
"KEEP_PLATFORM_URL", default="https://platform.keephq.dev"
diff --git a/keep/workflowmanager/workflowstore.py b/keep/workflowmanager/workflowstore.py
index 109a4d2b2..48974492e 100644
--- a/keep/workflowmanager/workflowstore.py
+++ b/keep/workflowmanager/workflowstore.py
@@ -1,3 +1,4 @@
+import asyncio
import io
import logging
import os
@@ -63,7 +64,7 @@ def create_workflow(self, tenant_id: str, created_by, workflow: dict):
def delete_workflow(self, tenant_id, workflow_id):
self.logger.info(f"Deleting workflow {workflow_id}")
- workflow = get_workflow(tenant_id, workflow_id)
+ workflow = asyncio.run(get_workflow(tenant_id, workflow_id))
if not workflow:
raise HTTPException(
status_code=404, detail=f"Workflow {workflow_id} not found"
@@ -98,21 +99,21 @@ def _parse_workflow_to_dict(self, workflow_path: str) -> dict:
with open(workflow_path, "r") as file:
return self._read_workflow_from_stream(file)
- def get_raw_workflow(self, tenant_id: str, workflow_id: str) -> str:
- raw_workflow = get_raw_workflow(tenant_id, workflow_id)
+ async def get_raw_workflow(self, tenant_id: str, workflow_id: str) -> str:
+ raw_workflow = await get_raw_workflow(tenant_id, workflow_id)
workflow_yaml = yaml.safe_load(raw_workflow)
valid_workflow_yaml = {"workflow": workflow_yaml}
return yaml.dump(valid_workflow_yaml, width=99999)
- def get_workflow(self, tenant_id: str, workflow_id: str) -> Workflow:
- workflow = get_raw_workflow(tenant_id, workflow_id)
+ async def get_workflow(self, tenant_id: str, workflow_id: str) -> Workflow:
+ workflow = await get_raw_workflow(tenant_id, workflow_id)
if not workflow:
raise HTTPException(
status_code=404,
detail=f"Workflow {workflow_id} not found",
)
workflow_yaml = yaml.safe_load(workflow)
- workflow = self.parser.parse(tenant_id, workflow_yaml)
+ workflow = await self.parser.parse(tenant_id, workflow_yaml)
if len(workflow) > 1:
raise HTTPException(
status_code=500,
@@ -126,9 +127,9 @@ def get_workflow(self, tenant_id: str, workflow_id: str) -> Workflow:
detail=f"Workflow {workflow_id} not found",
)
- def get_workflow_from_dict(self, tenant_id: str, workflow: dict) -> Workflow:
+ async def get_workflow_from_dict(self, tenant_id: str, workflow: dict) -> Workflow:
logging.info("Parsing workflow from dict", extra={"workflow": workflow})
- workflow = self.parser.parse(tenant_id, workflow)
+ workflow = await self.parser.parse(tenant_id, workflow)
if workflow:
return workflow[0]
else:
@@ -158,7 +159,7 @@ def get_all_workflows_yamls(self, tenant_id: str) -> list[str]:
workflow_yamls = get_all_workflows_yamls(tenant_id)
return workflow_yamls
- def get_workflows_from_path(
+ async def get_workflows_from_path(
self,
tenant_id,
workflow_path: str | tuple[str],
@@ -181,25 +182,25 @@ def get_workflows_from_path(
for workflow_url in workflow_path:
workflow_yaml = self._parse_workflow_to_dict(workflow_url)
workflows.extend(
- self.parser.parse(
+ await self.parser.parse(
tenant_id, workflow_yaml, providers_file, actions_file
)
)
elif os.path.isdir(workflow_path):
workflows.extend(
- self._get_workflows_from_directory(
+ await self._get_workflows_from_directory(
tenant_id, workflow_path, providers_file, actions_file
)
)
else:
workflow_yaml = self._parse_workflow_to_dict(workflow_path)
- workflows = self.parser.parse(
+ workflows = await self.parser.parse(
tenant_id, workflow_yaml, providers_file, actions_file
)
return workflows
- def _get_workflows_from_directory(
+ async def _get_workflows_from_directory(
self,
tenant_id,
workflows_dir: str,
diff --git a/poetry.lock b/poetry.lock
index 56d053477..857ecbc90 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -147,6 +147,24 @@ files = [
[package.dependencies]
frozenlist = ">=1.1.0"
+[[package]]
+name = "aiosqlite"
+version = "0.20.0"
+description = "asyncio bridge to the standard sqlite3 module"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"},
+ {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"},
+]
+
+[package.dependencies]
+typing_extensions = ">=4.0"
+
+[package.extras]
+dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"]
+docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"]
+
[[package]]
name = "alembic"
version = "1.14.0"
@@ -297,6 +315,71 @@ files = [
{file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"},
]
+[[package]]
+name = "asyncmy"
+version = "0.2.10"
+description = "A fast asyncio MySQL driver"
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "asyncmy-0.2.10-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:c2237c8756b8f374099bd320c53b16f7ec0cee8258f00d72eed5a2cd3d251066"},
+ {file = "asyncmy-0.2.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:6e98d4fbf7ea0d99dfecb24968c9c350b019397ba1af9f181d51bb0f6f81919b"},
+ {file = "asyncmy-0.2.10-cp310-cp310-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:b1b1ee03556c7eda6422afc3aca132982a84706f8abf30f880d642f50670c7ed"},
+ {file = "asyncmy-0.2.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e2b97672ea3f0b335c0ffd3da1a5727b530f82f5032cd87e86c3aa3ac6df7f3"},
+ {file = "asyncmy-0.2.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c6471ce1f9ae1e6f0d55adfb57c49d0bcf5753a253cccbd33799ddb402fe7da2"},
+ {file = "asyncmy-0.2.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10e2a10fe44a2b216a1ae58fbdafa3fed661a625ec3c030c560c26f6ab618522"},
+ {file = "asyncmy-0.2.10-cp310-cp310-win32.whl", hash = "sha256:a791ab117787eb075bc37ed02caa7f3e30cca10f1b09ec7eeb51d733df1d49fc"},
+ {file = "asyncmy-0.2.10-cp310-cp310-win_amd64.whl", hash = "sha256:bd16fdc0964a4a1a19aec9797ca631c3ff2530013fdcd27225fc2e48af592804"},
+ {file = "asyncmy-0.2.10-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:7af0f1f31f800a8789620c195e92f36cce4def68ee70d625534544d43044ed2a"},
+ {file = "asyncmy-0.2.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:800116ab85dc53b24f484fb644fefffac56db7367a31e7d62f4097d495105a2c"},
+ {file = "asyncmy-0.2.10-cp311-cp311-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:39525e9d7e557b83db268ed14b149a13530e0d09a536943dba561a8a1c94cc07"},
+ {file = "asyncmy-0.2.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76e199d6b57918999efc702d2dbb182cb7ba8c604cdfc912517955219b16eaea"},
+ {file = "asyncmy-0.2.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ca8fdd7dbbf2d9b4c2d3a5fac42b058707d6a483b71fded29051b8ae198a250"},
+ {file = "asyncmy-0.2.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0df23db54e38602c803dacf1bbc1dcc4237a87223e659681f00d1a319a4f3826"},
+ {file = "asyncmy-0.2.10-cp311-cp311-win32.whl", hash = "sha256:a16633032be020b931acfd7cd1862c7dad42a96ea0b9b28786f2ec48e0a86757"},
+ {file = "asyncmy-0.2.10-cp311-cp311-win_amd64.whl", hash = "sha256:cca06212575922216b89218abd86a75f8f7375fc9c28159ea469f860785cdbc7"},
+ {file = "asyncmy-0.2.10-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:42295530c5f36784031f7fa42235ef8dd93a75d9b66904de087e68ff704b4f03"},
+ {file = "asyncmy-0.2.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:641a853ffcec762905cbeceeb623839c9149b854d5c3716eb9a22c2b505802af"},
+ {file = "asyncmy-0.2.10-cp312-cp312-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:c554874223dd36b1cfc15e2cd0090792ea3832798e8fe9e9d167557e9cf31b4d"},
+ {file = "asyncmy-0.2.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd16e84391dde8edb40c57d7db634706cbbafb75e6a01dc8b68a63f8dd9e44ca"},
+ {file = "asyncmy-0.2.10-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9f6b44c4bf4bb69a2a1d9d26dee302473099105ba95283b479458c448943ed3c"},
+ {file = "asyncmy-0.2.10-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:16d398b1aad0550c6fe1655b6758455e3554125af8aaf1f5abdc1546078c7257"},
+ {file = "asyncmy-0.2.10-cp312-cp312-win32.whl", hash = "sha256:59d2639dcc23939ae82b93b40a683c15a091460a3f77fa6aef1854c0a0af99cc"},
+ {file = "asyncmy-0.2.10-cp312-cp312-win_amd64.whl", hash = "sha256:4c6674073be97ffb7ac7f909e803008b23e50281131fef4e30b7b2162141a574"},
+ {file = "asyncmy-0.2.10-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:85bc4522d8b632cd3327001a00cb24416883fc3905857737b99aa00bc0703fe1"},
+ {file = "asyncmy-0.2.10-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:c93768dde803c7c118e6ac1893f98252e48fecad7c20bb7e27d4bdf3d130a044"},
+ {file = "asyncmy-0.2.10-cp38-cp38-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:93b6d7db19a093abdeceb454826ff752ce1917288635d5d63519068ef5b2f446"},
+ {file = "asyncmy-0.2.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acecd4bbb513a67a94097fd499dac854546e07d2ff63c7fb5f4d2c077e4bdf91"},
+ {file = "asyncmy-0.2.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1b4b346c02fca1d160005d4921753bb00ed03422f0c6ec90936c43aad96b7d52"},
+ {file = "asyncmy-0.2.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8d393570e1c96ca200075797cc4f80849fc0ea960a45c6035855b1d392f33768"},
+ {file = "asyncmy-0.2.10-cp38-cp38-win32.whl", hash = "sha256:c8ee5282af5f38b4dc3ae94a3485688bd6c0d3509ba37226dbaa187f1708e32c"},
+ {file = "asyncmy-0.2.10-cp38-cp38-win_amd64.whl", hash = "sha256:10b3dfb119d7a9cb3aaae355c0981e60934f57297ea560bfdb280c5d85f77a9d"},
+ {file = "asyncmy-0.2.10-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:244289bd1bea84384866bde50b09fe5b24856640e30a04073eacb71987b7b6ad"},
+ {file = "asyncmy-0.2.10-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:6c9d024b160b9f869a21e62c4ef34a7b7a4b5a886ae03019d4182621ea804d2c"},
+ {file = "asyncmy-0.2.10-cp39-cp39-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:b57594eea942224626203503f24fa88a47eaab3f13c9f24435091ea910f4b966"},
+ {file = "asyncmy-0.2.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346192941470ac2d315f97afa14c0131ff846c911da14861baf8a1f8ed541664"},
+ {file = "asyncmy-0.2.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:957c2b48c5228e5f91fdf389daf38261a7b8989ad0eb0d1ba4e5680ef2a4a078"},
+ {file = "asyncmy-0.2.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:472989d7bfa405c108a7f3c408bbed52306504fb3aa28963d833cb7eeaafece0"},
+ {file = "asyncmy-0.2.10-cp39-cp39-win32.whl", hash = "sha256:714b0fdadd72031e972de2bbbd14e35a19d5a7e001594f0c8a69f92f0d05acc9"},
+ {file = "asyncmy-0.2.10-cp39-cp39-win_amd64.whl", hash = "sha256:9fb58645d3da0b91db384f8519b16edc7dc421c966ada8647756318915d63696"},
+ {file = "asyncmy-0.2.10-pp310-pypy310_pp73-macosx_13_0_x86_64.whl", hash = "sha256:f10c977c60a95bd6ec6b8654e20c8f53bad566911562a7ad7117ca94618f05d3"},
+ {file = "asyncmy-0.2.10-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:aab07fbdb9466beaffef136ffabe388f0d295d8d2adb8f62c272f1d4076515b9"},
+ {file = "asyncmy-0.2.10-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:63144322ade68262201baae73ad0c8a06b98a3c6ae39d1f3f21c41cc5287066a"},
+ {file = "asyncmy-0.2.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9659d95c6f2a611aec15bdd928950df937bf68bc4bbb68b809ee8924b6756067"},
+ {file = "asyncmy-0.2.10-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8ced4bd938e95ede0fb9fa54755773df47bdb9f29f142512501e613dd95cf4a4"},
+ {file = "asyncmy-0.2.10-pp38-pypy38_pp73-macosx_13_0_x86_64.whl", hash = "sha256:f76080d5d360635f0c67411fb3fb890d7a5a9e31135b4bb07c6a4e588287b671"},
+ {file = "asyncmy-0.2.10-pp38-pypy38_pp73-macosx_14_0_arm64.whl", hash = "sha256:fde04da1a3e656ec7d7656b2d02ade87df9baf88cc1ebeff5d2288f856c086a4"},
+ {file = "asyncmy-0.2.10-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:a83383cc6951bcde11c9cdda216a0849d29be2002a8fb6405ea6d9e5ced4ec69"},
+ {file = "asyncmy-0.2.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58c3d8c12030c23df93929c8371da818211fa02c7b50cd178960c0a88e538adf"},
+ {file = "asyncmy-0.2.10-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e0c8706ff7fc003775f3fc63804ea45be61e9ac9df9fd968977f781189d625ed"},
+ {file = "asyncmy-0.2.10-pp39-pypy39_pp73-macosx_13_0_x86_64.whl", hash = "sha256:4651caaee6f4d7a8eb478a0dc460f8e91ab09a2d8d32444bc2b235544c791947"},
+ {file = "asyncmy-0.2.10-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:ac091b327f01c38d91c697c810ba49e5f836890d48f6879ba0738040bb244290"},
+ {file = "asyncmy-0.2.10-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:e1d2d9387cd3971297486c21098e035c620149c9033369491f58fe4fc08825b6"},
+ {file = "asyncmy-0.2.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a760cb486ddb2c936711325236e6b9213564a9bb5deb2f6949dbd16c8e4d739e"},
+ {file = "asyncmy-0.2.10-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1586f26633c05b16bcfc46d86e9875f4941280e12afa79a741cdf77ae4ccfb4d"},
+ {file = "asyncmy-0.2.10.tar.gz", hash = "sha256:f4b67edadf7caa56bdaf1c2e6cf451150c0a86f5353744deabe4426fe27aff4e"},
+]
+
[[package]]
name = "attrs"
version = "24.3.0"
@@ -546,17 +629,17 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "boto3"
-version = "1.35.84"
+version = "1.35.90"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "boto3-1.35.84-py3-none-any.whl", hash = "sha256:c94fc8023caf952f8740a48fc400521bba167f883cfa547d985c05fda7223f7a"},
- {file = "boto3-1.35.84.tar.gz", hash = "sha256:9f9bf72d92f7fdd546b974ffa45fa6715b9af7f5c00463e9d0f6ef9c95efe0c2"},
+ {file = "boto3-1.35.90-py3-none-any.whl", hash = "sha256:b0874233057995a8f0c813f5b45a36c09630e74c43d7a7c64db2feef2915d493"},
+ {file = "boto3-1.35.90.tar.gz", hash = "sha256:dc56caaaab2157a4bfc109c88b50cd032f3ac66c06d17f8ee335b798eaf53e5c"},
]
[package.dependencies]
-botocore = ">=1.35.84,<1.36.0"
+botocore = ">=1.35.90,<1.36.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -565,13 +648,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.35.84"
+version = "1.35.90"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.35.84-py3-none-any.whl", hash = "sha256:b4dc2ac7f54ba959429e1debbd6c7c2fb2349baa1cd63803f0682f0773dbd077"},
- {file = "botocore-1.35.84.tar.gz", hash = "sha256:f86754882e04683e2e99a6a23377d0dd7f1fc2b2242844b2381dbe4dcd639301"},
+ {file = "botocore-1.35.90-py3-none-any.whl", hash = "sha256:51dcbe1b32e2ac43dac17091f401a00ce5939f76afe999081802009cce1e92e4"},
+ {file = "botocore-1.35.90.tar.gz", hash = "sha256:f007f58e8e3c1ad0412a6ddfae40ed92a7bca571c068cb959902bcf107f2ae48"},
]
[package.dependencies]
@@ -739,116 +822,103 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "3.4.0"
+version = "3.4.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
-python-versions = ">=3.7.0"
+python-versions = ">=3.7"
files = [
- {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
- {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
- {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"},
+ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"},
+ {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"},
]
[[package]]
@@ -864,13 +934,13 @@ files = [
[[package]]
name = "click"
-version = "8.1.7"
+version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
- {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
- {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
+ {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
]
[package.dependencies]
@@ -1035,73 +1105,73 @@ files = [
[[package]]
name = "coverage"
-version = "7.6.9"
+version = "7.6.10"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.9"
files = [
- {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"},
- {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"},
- {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"},
- {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"},
- {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"},
- {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"},
- {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"},
- {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"},
- {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"},
- {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"},
- {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"},
- {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"},
- {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"},
- {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"},
- {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"},
- {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"},
- {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"},
- {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"},
- {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"},
- {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"},
- {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"},
- {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"},
- {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"},
- {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"},
- {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"},
- {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"},
- {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"},
- {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"},
- {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"},
- {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"},
- {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"},
- {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"},
- {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"},
- {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"},
- {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"},
- {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"},
- {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"},
- {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"},
- {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"},
- {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"},
- {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"},
- {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"},
- {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"},
- {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"},
- {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"},
- {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"},
- {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"},
- {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"},
- {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"},
- {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"},
- {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"},
- {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"},
- {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"},
- {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"},
- {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"},
- {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"},
- {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"},
- {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"},
- {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"},
- {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"},
- {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"},
- {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"},
+ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"},
+ {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"},
+ {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"},
+ {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"},
+ {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"},
+ {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"},
+ {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"},
+ {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"},
+ {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"},
+ {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"},
+ {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"},
+ {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"},
+ {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"},
+ {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"},
+ {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"},
+ {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"},
+ {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"},
+ {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"},
+ {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"},
+ {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"},
+ {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"},
+ {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"},
+ {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"},
+ {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"},
+ {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"},
+ {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"},
+ {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"},
+ {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"},
+ {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"},
+ {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"},
+ {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"},
+ {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"},
+ {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"},
+ {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"},
+ {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"},
+ {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"},
+ {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"},
+ {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"},
+ {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"},
+ {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"},
+ {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"},
+ {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"},
+ {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"},
+ {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"},
+ {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"},
+ {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"},
+ {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"},
+ {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"},
+ {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"},
+ {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"},
+ {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"},
+ {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"},
+ {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"},
+ {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"},
+ {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"},
+ {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"},
+ {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"},
+ {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"},
+ {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"},
+ {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"},
+ {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"},
+ {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"},
]
[package.extras]
@@ -2203,13 +2273,13 @@ parser = ["pyhcl (>=0.4.4,<0.5.0)"]
[[package]]
name = "identify"
-version = "2.6.3"
+version = "2.6.4"
description = "File identification library for Python"
optional = false
python-versions = ">=3.9"
files = [
- {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"},
- {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"},
+ {file = "identify-2.6.4-py2.py3-none-any.whl", hash = "sha256:993b0f01b97e0568c179bb9196391ff391bfb88a99099dbf5ce392b68f42d0af"},
+ {file = "identify-2.6.4.tar.gz", hash = "sha256:285a7d27e397652e8cafe537a6cc97dd470a970f48fb2e9d979aa38eae5513ac"},
]
[package.extras]
@@ -2739,7 +2809,7 @@ name = "ndg-httpsclient"
version = "0.5.1"
description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL"
optional = false
-python-versions = ">=2.7,<3.0.0 || >=3.4.0"
+python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0"
files = [
{file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"},
{file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"},
@@ -2750,6 +2820,17 @@ files = [
pyasn1 = ">=0.1.1"
PyOpenSSL = "*"
+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+description = "Patch asyncio to allow nested event loops"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
+ {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
+]
+
[[package]]
name = "nodeenv"
version = "1.9.1"
@@ -4167,13 +4248,13 @@ cli = ["click (>=5.0)"]
[[package]]
name = "python-engineio"
-version = "4.11.1"
+version = "4.11.2"
description = "Engine.IO server and client for Python"
optional = false
python-versions = ">=3.6"
files = [
- {file = "python_engineio-4.11.1-py3-none-any.whl", hash = "sha256:8ff9ec366724cd9b0fd92acf7a61b15ae923d28f37f842304adbd7f71b3d6672"},
- {file = "python_engineio-4.11.1.tar.gz", hash = "sha256:ff8a23a843c223ec793835f1bcf584ff89ce0f1c2bcce37dffa6436c6fa74133"},
+ {file = "python_engineio-4.11.2-py3-none-any.whl", hash = "sha256:f0971ac4c65accc489154fe12efd88f53ca8caf04754c46a66e85f5102ef22ad"},
+ {file = "python_engineio-4.11.2.tar.gz", hash = "sha256:145bb0daceb904b4bb2d3eb2d93f7dbb7bb87a6a0c4f20a94cc8654dec977129"},
]
[package.dependencies]
@@ -4259,13 +4340,13 @@ files = [
[[package]]
name = "python-socketio"
-version = "5.12.0"
+version = "5.12.1"
description = "Socket.IO server and client for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "python_socketio-5.12.0-py3-none-any.whl", hash = "sha256:50fe22fd2b0aa634df3e74489e42217b09af2fb22eee45f2c006df36d1d08cb9"},
- {file = "python_socketio-5.12.0.tar.gz", hash = "sha256:39b55bff4ef6ac5c39b8bbc38fa61962e22e15349b038c1ca7ee2e18824e06dc"},
+ {file = "python_socketio-5.12.1-py3-none-any.whl", hash = "sha256:24a0ea7cfff0e021eb28c68edbf7914ee4111bdf030b95e4d250c4dc9af7a386"},
+ {file = "python_socketio-5.12.1.tar.gz", hash = "sha256:0299ff1f470b676c09c1bfab1dead25405077d227b2c13cf217a34dadc68ba9c"},
]
[package.dependencies]
@@ -4534,6 +4615,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
+ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -4542,6 +4624,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
+ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -4550,6 +4633,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
+ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -4558,6 +4642,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
+ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -4566,6 +4651,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
+ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -5130,13 +5216,13 @@ python-socketio = {version = ">=5.0.0", extras = ["client"]}
[[package]]
name = "urllib3"
-version = "2.2.3"
+version = "2.3.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
- {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
+ {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
]
[package.extras]
@@ -5464,4 +5550,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.11,<3.12"
-content-hash = "089d3d061da28029a73cbe1b566b3d6ef531145407b322934821b1003ff9681d"
+content-hash = "21315efa1186eb473efe6a16eaaac7834d23feb9a2a00482176afa98e3ff1f25"
diff --git a/pyproject.toml b/pyproject.toml
index 47fc30ea5..1d3d0ed62 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,6 +5,10 @@ description = "Alerting. for developers, by developers."
authors = ["Keep Alerting LTD"]
packages = [{include = "keep"}]
+[tool.pytest.ini_options]
+asyncio_default_fixture_loop_scope = "function"
+timeout = 60
+
[tool.poetry.dependencies]
python = ">=3.11,<3.12"
click = "^8.1.3"
@@ -48,6 +52,7 @@ posthog = "^3.0.1"
google-cloud-storage = "^2.10.0"
auth0-python = "^4.4.1"
asyncio = "^3.4.3"
+nest-asyncio = "1.6.0"
python-multipart = "^0.0.18"
kubernetes = "^27.2.0"
opentelemetry-exporter-otlp-proto-grpc = "^1.20.0"
@@ -89,6 +94,9 @@ psycopg = "^3.2.3"
prometheus-client = "^0.21.1"
psycopg2-binary = "^2.9.10"
+aiosqlite = "^0.20.0"
+asyncmy = "^0.2.10"
+pytest-asyncio = "^0.25.0"
prometheus-fastapi-instrumentator = "^7.0.0"
slowapi = "^0.1.9"
[tool.poetry.group.dev.dependencies]
diff --git a/tests/conftest.py b/tests/conftest.py
index c3c831948..0cf1496a3 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,3 +1,4 @@
+import asyncio
import inspect
import os
import random
@@ -15,9 +16,11 @@
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
+from sqlalchemy.ext.asyncio import create_async_engine
from starlette_context import context, request_cycle_context
# This import is required to create the tables
+from keep.api.core.db_utils import asynchronize_connection_string
from keep.api.core.dependencies import SINGLE_TENANT_UUID
from keep.api.core.elastic import ElasticClient
from keep.api.models.db.alert import *
@@ -223,7 +226,7 @@ def db_session(request, monkeypatch):
mock_engine = create_engine(db_connection_string)
# sqlite
else:
- db_connection_string = "sqlite:///:memory:"
+ db_connection_string = "sqlite:///file:shared_memory?mode=memory&cache=shared&uri=true"
mock_engine = create_engine(
db_connection_string,
connect_args={"check_same_thread": False},
@@ -246,6 +249,8 @@ def do_begin(conn):
except Exception:
pass
+ mock_engine_async = create_async_engine(asynchronize_connection_string(db_connection_string))
+
SQLModel.metadata.create_all(mock_engine)
# Mock the environment variables so db.py will use it
@@ -314,9 +319,15 @@ def do_begin(conn):
session.add_all(workflow_data)
session.commit()
+ def mock_create_engine(_async=False):
+ if _async:
+ return mock_engine_async
+ return mock_engine
+
with patch("keep.api.core.db.engine", mock_engine):
- with patch("keep.api.core.db_utils.create_db_engine", return_value=mock_engine):
- yield session
+ with patch("keep.api.core.db.engine_async", mock_engine_async):
+ with patch("keep.api.core.db_utils.create_db_engine", side_effect=mock_create_engine):
+ yield session
import logging
@@ -407,14 +418,14 @@ def is_elastic_responsive(host, port, user, password):
info = elastic_client._client.info()
print("Elastic still up now")
return True if info else False
- except Exception:
- print("Elastic still not up")
- pass
+ except Exception as e:
+ print(f"Elastic still not up: {e}")
return False
@pytest.fixture(scope="session")
+@pytest.mark.asyncio
def elastic_container(docker_ip, docker_services):
try:
if os.getenv("SKIP_DOCKER") or os.getenv("GITHUB_ACTIONS") == "true":
diff --git a/tests/test_contextmanager.py b/tests/test_contextmanager.py
index fad627e5d..4691a8556 100644
--- a/tests/test_contextmanager.py
+++ b/tests/test_contextmanager.py
@@ -180,15 +180,15 @@ def test_context_manager_set_step_context(context_manager: ContextManager):
assert context_manager.steps_context["this"]["results"] == results
assert context_manager.steps_context[step_id]["results"] == results
-
-def test_context_manager_get_last_alert_run(
+@pytest.mark.asyncio
+async def test_context_manager_get_last_alert_run(
context_manager_with_state: ContextManager, db_session
):
workflow_id = "test-id-1"
alert_context = {"mock": "mock"}
alert_status = "firing"
context_manager_with_state.tenant_id = SINGLE_TENANT_UUID
- last_run = context_manager_with_state.get_last_workflow_run(workflow_id)
+ last_run = await context_manager_with_state.get_last_workflow_run(workflow_id)
if last_run is None:
pytest.fail("No workflow run found with the given workflow_id")
assert last_run == WorkflowExecution(
diff --git a/tests/test_enrichments.py b/tests/test_enrichments.py
index 2dd9c873d..b41751802 100644
--- a/tests/test_enrichments.py
+++ b/tests/test_enrichments.py
@@ -52,11 +52,13 @@ def mock_alert_dto():
)
-def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+async def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto, db_session):
# Assuming there are no extraction rules
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = (
[]
)
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
result_event = enrichment_bl.run_extraction_rules(mock_alert_dto)
@@ -65,7 +67,8 @@ def test_run_extraction_rules_no_rules_applies(mock_session, mock_alert_dto):
assert result_event == mock_alert_dto # Assuming no change if no rules
-def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto, db_session):
# Setup an extraction rule that should apply based on the alert content
rule = ExtractionRule(
id=1,
@@ -80,6 +83,7 @@ def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -92,7 +96,8 @@ def test_run_extraction_rules_regex_named_groups(mock_session, mock_alert_dto):
assert enriched_event.alert_type == "Alert"
-def test_run_extraction_rules_event_is_dict(mock_session):
+@pytest.mark.asyncio
+def test_run_extraction_rules_event_is_dict(mock_session, db_session):
event = {"name": "Test Alert", "source": ["source_test"]}
rule = ExtractionRule(
id=1,
@@ -106,6 +111,7 @@ def test_run_extraction_rules_event_is_dict(mock_session):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -118,10 +124,12 @@ def test_run_extraction_rules_event_is_dict(mock_session):
) # Ensuring the attribute is correctly processed
-def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto, db_session):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = (
[]
)
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
result_event = enrichment_bl.run_extraction_rules(mock_alert_dto)
@@ -131,7 +139,8 @@ def test_run_extraction_rules_no_rules(mock_session, mock_alert_dto):
) # Should return the original event if no rules apply
-def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto, db_session):
rule = ExtractionRule(
id=1,
tenant_id="test_tenant",
@@ -144,6 +153,7 @@ def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -155,7 +165,8 @@ def test_run_extraction_rules_attribute_no_template(mock_session, mock_alert_dto
) # Assuming the code does not modify the event if attribute is not in template format
-def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto, db_session):
rule = ExtractionRule(
id=1,
tenant_id="test_tenant",
@@ -168,6 +179,7 @@ def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -177,7 +189,8 @@ def test_run_extraction_rules_empty_attribute_value(mock_session, mock_alert_dto
assert enriched_event == mock_alert_dto # Check if event is unchanged
-def test_run_extraction_rules_handle_source_special_case(mock_session):
+@pytest.mark.asyncio
+def test_run_extraction_rules_handle_source_special_case(mock_session, db_session):
event = {"name": "Test Alert", "source": "incorrect_format"}
rule = ExtractionRule(
id=1,
@@ -191,6 +204,7 @@ def test_run_extraction_rules_handle_source_special_case(mock_session):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -212,7 +226,8 @@ def test_run_extraction_rules_handle_source_special_case(mock_session):
#### 2. Testing `run_extraction_rules` with CEL Conditions
-def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto, db_session):
rule = ExtractionRule(
id=2,
tenant_id="test_tenant",
@@ -226,6 +241,7 @@ def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
# Mocking the CEL environment to return True for the condition
with patch("chevron.render", return_value="test_source"), patch(
@@ -244,7 +260,8 @@ def test_run_extraction_rules_with_conditions(mock_session, mock_alert_dto):
assert enriched_event.source_name == "test_source"
-def test_run_mapping_rules_applies(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_mapping_rules_applies(mock_session, mock_alert_dto, db_session):
# Setup a mapping rule
rule = MappingRule(
id=1,
@@ -258,6 +275,7 @@ def test_run_mapping_rules_applies(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -267,7 +285,8 @@ def test_run_mapping_rules_applies(mock_session, mock_alert_dto):
assert mock_alert_dto.service == "new_service"
-def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto, db_session):
rule = MappingRule(
id=1,
tenant_id="test_tenant",
@@ -283,6 +302,7 @@ def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -311,7 +331,8 @@ def test_run_mapping_rules_with_regex_match(mock_session, mock_alert_dto):
), "Service should not match any entry"
-def test_run_mapping_rules_no_match(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_mapping_rules_no_match(mock_session, mock_alert_dto, db_session):
rule = MappingRule(
id=1,
tenant_id="test_tenant",
@@ -327,6 +348,7 @@ def test_run_mapping_rules_no_match(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
del mock_alert_dto.service
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -339,7 +361,8 @@ def test_run_mapping_rules_no_match(mock_session, mock_alert_dto):
), "Service should not match any entry"
-def test_check_matcher_with_and_condition(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_check_matcher_with_and_condition(mock_session, mock_alert_dto, db_session):
# Setup a mapping rule with && condition in matchers
rule = MappingRule(
id=1,
@@ -353,6 +376,7 @@ def test_check_matcher_with_and_condition(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -376,7 +400,8 @@ def test_check_matcher_with_and_condition(mock_session, mock_alert_dto):
assert result is False
-def test_check_matcher_with_or_condition(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_check_matcher_with_or_condition(mock_session, mock_alert_dto, db_session):
# Setup a mapping rule with || condition in matchers
rule = MappingRule(
id=1,
@@ -393,6 +418,7 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -428,7 +454,8 @@ def test_check_matcher_with_or_condition(mock_session, mock_alert_dto):
],
indirect=True,
)
-def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts):
+@pytest.mark.asyncio
+async def test_mapping_rule_with_elsatic(db_session, mock_session, mock_alert_dto, setup_alerts):
import os
# first, use elastic
@@ -449,6 +476,7 @@ def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id=SINGLE_TENANT_UUID, db=mock_session)
@@ -459,7 +487,8 @@ def test_mapping_rule_with_elsatic(mock_session, mock_alert_dto, setup_alerts):
@pytest.mark.parametrize("test_app", ["NO_AUTH"], indirect=True)
-def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client):
+@pytest.mark.asyncio
+async def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client):
# add some rule
rule = MappingRule(
id=1,
@@ -497,6 +526,7 @@ def test_enrichment(db_session, client, test_app, mock_alert_dto, elastic_client
@pytest.mark.parametrize("test_app", ["NO_AUTH"], indirect=True)
+@pytest.mark.asyncio
def test_disposable_enrichment(db_session, client, test_app, mock_alert_dto):
# SHAHAR: there is a voodoo so that you must do something with the db_session to kick it off
rule = MappingRule(
@@ -585,7 +615,8 @@ def test_disposable_enrichment(db_session, client, test_app, mock_alert_dto):
assert alert["status"] == "firing"
-def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto, db_session):
# Mock a TopologyService with dependencies to simulate the DB structure
mock_topology_service = TopologyService(
id=1, tenant_id="keep", service="test-service", display_name="Test Service"
@@ -604,6 +635,7 @@ def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto):
# Mock the session to return this topology mapping rule
mock_session.query.return_value.filter.return_value.all.return_value = [rule]
+ mock_session.db_session = db_session
# Initialize the EnrichmentsBl class with the mock session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -644,7 +676,8 @@ def test_topology_mapping_rule_enrichment(mock_session, mock_alert_dto):
)
-def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto, db_session):
# Setup a mapping rule with complex matchers
rule = MappingRule(
id=1,
@@ -670,6 +703,7 @@ def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
@@ -702,7 +736,8 @@ def test_run_mapping_rules_with_complex_matchers(mock_session, mock_alert_dto):
assert not hasattr(mock_alert_dto, "service")
-def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto):
+@pytest.mark.asyncio
+def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto, db_session):
# Setup a mapping rule with complex matchers and multiple enrichment fields
rule = MappingRule(
id=1,
@@ -724,6 +759,7 @@ def test_run_mapping_rules_enrichments_filtering(mock_session, mock_alert_dto):
mock_session.query.return_value.filter.return_value.filter.return_value.order_by.return_value.all.return_value = [
rule
]
+ mock_session.db_session = db_session
enrichment_bl = EnrichmentsBl(tenant_id="test_tenant", db=mock_session)
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 6e607fe37..dba1a5eeb 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -19,16 +19,16 @@
from keep.step.step import Step
from keep.workflowmanager.workflowstore import WorkflowStore
-
-def test_parse_with_nonexistent_file(db_session):
+@pytest.mark.asyncio
+async def test_parse_with_nonexistent_file(db_session):
workflow_store = WorkflowStore()
# Expected error when a given input does not describe an existing file
with pytest.raises(HTTPException) as e:
- workflow_store.get_workflow(SINGLE_TENANT_UUID, "test-not-found")
+ await workflow_store.get_workflow(SINGLE_TENANT_UUID, "test-not-found")
assert e.value.status_code == 404
-
-def test_parse_with_nonexistent_url(monkeypatch):
+@pytest.mark.asyncio
+async def test_parse_with_nonexistent_url(monkeypatch):
# Mocking requests.get to always raise a ConnectionError
def mock_get(*args, **kwargs):
raise requests.exceptions.ConnectionError
@@ -37,7 +37,7 @@ def mock_get(*args, **kwargs):
workflow_store = WorkflowStore()
# Expected error when a given input does not describe an existing URL
with pytest.raises(requests.exceptions.ConnectionError):
- workflow_store.get_workflows_from_path(
+ await workflow_store.get_workflows_from_path(
SINGLE_TENANT_UUID, "https://ThisWebsiteDoNotExist.com"
)
@@ -46,10 +46,10 @@ def mock_get(*args, **kwargs):
workflow_path = str(path_to_test_resources / "db_disk_space_for_testing.yml")
providers_path = str(path_to_test_resources / "providers_for_testing.yaml")
-
-def test_parse_sanity_check(db_session):
+@pytest.mark.asyncio
+async def test_parse_sanity_check(db_session):
workflow_store = WorkflowStore()
- parsed_workflows = workflow_store.get_workflows_from_path(
+ parsed_workflows = await workflow_store.get_workflows_from_path(
SINGLE_TENANT_UUID, workflow_path, providers_path
)
assert parsed_workflows is not None
@@ -302,9 +302,10 @@ def test_parse_alert_steps(self):
class TestReusableActionWithWorkflow:
- def test_if_action_is_expanded(self, db_session):
+ @pytest.mark.asyncio
+ async def test_if_action_is_expanded(self, db_session):
workflow_store = WorkflowStore()
- workflows = workflow_store.get_workflows_from_path(
+ workflows = await workflow_store.get_workflows_from_path(
tenant_id=SINGLE_TENANT_UUID,
workflow_path=reusable_workflow_path,
providers_file=reusable_providers_path,
diff --git a/tests/test_steps.py b/tests/test_steps.py
index 4009680da..3fe508390 100644
--- a/tests/test_steps.py
+++ b/tests/test_steps.py
@@ -42,29 +42,37 @@ def sample_step():
return step
-
-def test_run_single(sample_step):
+@pytest.mark.asyncio
+async def test_run_single(sample_step):
# Simulate the result
- sample_step.provider.query = Mock(return_value="result")
+
+ async def result(*args, **kwargs):
+ return "result"
+
+ sample_step.provider.query = Mock(side_effect=result)
# Run the method
- result = sample_step._run_single()
+ result = await sample_step._run_single()
# Assertions
assert result is True # Action should run successfully
sample_step.provider.query.assert_called_with(param1="value1", param2="value2")
assert sample_step.provider.query.call_count == 1
+@pytest.mark.asyncio
+async def test_run_single_exception(sample_step):
-def test_run_single_exception(sample_step):
+ async def result(*args, **kwargs):
+ raise Exception("Test exception")
+
# Simulate an exception
- sample_step.provider.query = Mock(side_effect=Exception("Test exception"))
+ sample_step.provider.query = Mock(side_effect=result)
start_time = time.time()
# Run the method and expect an exception to be raised
with pytest.raises(StepError):
- sample_step._run_single()
+ await sample_step._run_single()
end_time = time.time()
execution_time = end_time - start_time
diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py
index f662a4bcf..a83686b7b 100644
--- a/tests/test_workflow_execution.py
+++ b/tests/test_workflow_execution.py
@@ -1,6 +1,7 @@
import asyncio
import json
import logging
+import threading
import time
from collections import defaultdict
from datetime import datetime, timedelta
@@ -75,30 +76,19 @@
Alert details: {{ alert }}"
"""
-
@pytest.fixture(scope="module")
def workflow_manager():
"""
Fixture to create and manage a WorkflowManager instance.
"""
manager = None
- try:
- from keep.workflowmanager.workflowscheduler import WorkflowScheduler
-
- scheduler = WorkflowScheduler(None)
- manager = WorkflowManager.get_instance()
- scheduler.workflow_manager = manager
- manager.scheduler = scheduler
- asyncio.run(manager.start())
- yield manager
- finally:
- if manager:
- try:
- manager.stop()
- # Give some time for threads to clean up
- time.sleep(1)
- except Exception as e:
- print(f"Error stopping workflow manager: {e}")
+ from keep.workflowmanager.workflowscheduler import WorkflowScheduler
+
+ scheduler = WorkflowScheduler(None)
+ manager = WorkflowManager.get_instance()
+ scheduler.workflow_manager = manager
+ manager.scheduler = scheduler
+ yield manager
@pytest.fixture
@@ -207,7 +197,8 @@ def setup_workflow_with_two_providers(db_session):
],
indirect=["test_app", "db_session"],
)
-def test_workflow_execution(
+@pytest.mark.asyncio
+async def test_workflow_execution(
db_session,
test_app,
create_alert,
@@ -243,6 +234,8 @@ def test_workflow_execution(
"""
base_time = datetime.now(tz=pytz.utc)
+ await workflow_manager.start()
+
# Create alerts with specified statuses and timestamps
alert_statuses.reverse()
for time_diff, status in alert_statuses:
@@ -251,7 +244,8 @@ def test_workflow_execution(
)
create_alert("fp1", alert_status, base_time - timedelta(minutes=time_diff))
- time.sleep(1)
+ await asyncio.sleep(1)
+
# Create the current alert
current_alert = AlertDto(
id="grafana-1",
@@ -263,25 +257,29 @@ def test_workflow_execution(
)
# Insert the current alert into the workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
# Wait for the workflow execution to complete
workflow_execution = None
count = 0
status = None
+ found = False
while (
- workflow_execution is None
- or workflow_execution.status == "in_progress"
- and count < 30
+ not found and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, "alert-time-check"
)
if workflow_execution is not None:
- status = workflow_execution.status
- time.sleep(1)
+ if ("send-slack-message-tier-1" in workflow_execution.results and
+ "send-slack-message-tier-2" in workflow_execution.results and
+ workflow_execution.status == "success"):
+ found = True
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
# Check if the workflow execution was successful
assert workflow_execution is not None
assert workflow_execution.status == "success"
@@ -298,6 +296,7 @@ def test_workflow_execution(
assert "Tier 2" in workflow_execution.results["send-slack-message-tier-2"][0]
+
workflow_definition2 = """workflow:
id: %s
description: send slack message only the first time an alert fires
@@ -386,7 +385,8 @@ def test_workflow_execution(
],
indirect=["test_app"],
)
-def test_workflow_execution_2(
+@pytest.mark.asyncio
+async def test_workflow_execution_2(
db_session,
test_app,
create_alert,
@@ -448,9 +448,11 @@ def test_workflow_execution_2(
)
# Insert the current alert into the workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
assert len(workflow_manager.scheduler.workflows_to_run) == 1
+ await workflow_manager.start()
+
# Wait for the workflow execution to complete
workflow_execution = None
count = 0
@@ -460,15 +462,17 @@ def test_workflow_execution_2(
or workflow_execution.status == "in_progress"
and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID,
workflow_id,
)
if workflow_execution is not None:
status = workflow_execution.status
- time.sleep(1)
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
assert len(workflow_manager.scheduler.workflows_to_run) == 0
# Check if the workflow execution was successful
assert workflow_execution is not None
@@ -531,7 +535,8 @@ def test_workflow_execution_2(
],
indirect=["test_app", "db_session"],
)
-def test_workflow_execution3(
+@pytest.mark.asyncio
+async def test_workflow_execution_3(
db_session,
test_app,
create_alert,
@@ -571,10 +576,11 @@ def test_workflow_execution3(
)
# sleep one second to avoid the case where tier0 alerts are not triggered
- time.sleep(1)
+ await asyncio.sleep(1)
# Insert the current alert into the workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.start()
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
# Wait for the workflow execution to complete
workflow_execution = None
@@ -585,14 +591,16 @@ def test_workflow_execution3(
or workflow_execution.status == "in_progress"
and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, "alert-first-time"
)
if workflow_execution is not None:
status = workflow_execution.status
- time.sleep(1)
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
# Check if the workflow execution was successful
assert workflow_execution is not None
@@ -645,7 +653,8 @@ def test_workflow_execution3(
],
indirect=["test_app"],
)
-def test_workflow_execution_with_disabled_workflow(
+@pytest.mark.asyncio
+async def test_workflow_execution_with_disabled_workflow(
db_session,
test_app,
create_alert,
@@ -694,31 +703,32 @@ def test_workflow_execution_with_disabled_workflow(
# Sleep one second to avoid the case where tier0 alerts are not triggered
time.sleep(1)
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.start()
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
enabled_workflow_execution = None
disabled_workflow_execution = None
count = 0
- while (
- (
- enabled_workflow_execution is None
- or enabled_workflow_execution.status == "in_progress"
- )
- and disabled_workflow_execution is None
- ) and count < 30:
- enabled_workflow_execution = get_last_workflow_execution_by_workflow_id(
+ found = False
+ while not found and count < 30:
+ enabled_workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, enabled_id
)
- disabled_workflow_execution = get_last_workflow_execution_by_workflow_id(
+ disabled_workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, disabled_id
)
- time.sleep(1)
+ if enabled_workflow_execution is not None and disabled_workflow_execution is None:
+ if enabled_workflow_execution.status == "success":
+ found = True
+
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
assert enabled_workflow_execution is not None
- assert enabled_workflow_execution.status == "success"
assert disabled_workflow_execution is None
@@ -773,7 +783,8 @@ def test_workflow_execution_with_disabled_workflow(
],
indirect=["test_app"],
)
-def test_workflow_incident_triggers(
+@pytest.mark.asyncio
+async def test_workflow_incident_triggers(
db_session,
test_app,
workflow_manager,
@@ -804,7 +815,7 @@ def test_workflow_incident_triggers(
# Insert the current alert into the workflow manager
- def wait_workflow_execution(workflow_id):
+ async def wait_workflow_execution(workflow_id):
# Wait for the workflow execution to complete
workflow_execution = None
count = 0
@@ -813,17 +824,17 @@ def wait_workflow_execution(workflow_id):
or workflow_execution.status == "in_progress"
and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, workflow_id
)
- time.sleep(1)
+ await asyncio.sleep(1)
count += 1
return workflow_execution
-
- workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "created")
+ await workflow_manager.start()
+ await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "created")
assert len(workflow_manager.scheduler.workflows_to_run) == 1
- workflow_execution_created = wait_workflow_execution(
+ workflow_execution_created = await wait_workflow_execution(
"incident-triggers-test-created-updated"
)
assert workflow_execution_created is not None
@@ -833,9 +844,9 @@ def wait_workflow_execution(workflow_id):
]
assert len(workflow_manager.scheduler.workflows_to_run) == 0
- workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "updated")
+ await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "updated")
assert len(workflow_manager.scheduler.workflows_to_run) == 1
- workflow_execution_updated = wait_workflow_execution(
+ workflow_execution_updated = await wait_workflow_execution(
"incident-triggers-test-created-updated"
)
assert workflow_execution_updated is not None
@@ -845,7 +856,7 @@ def wait_workflow_execution(workflow_id):
]
# incident-triggers-test-created-updated should not be triggered
- workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted")
+ await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted")
assert len(workflow_manager.scheduler.workflows_to_run) == 0
workflow_deleted = Workflow(
@@ -860,11 +871,11 @@ def wait_workflow_execution(workflow_id):
db_session.add(workflow_deleted)
db_session.commit()
- workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted")
+ await workflow_manager.insert_incident(SINGLE_TENANT_UUID, incident, "deleted")
assert len(workflow_manager.scheduler.workflows_to_run) == 1
# incident-triggers-test-deleted should be triggered now
- workflow_execution_deleted = wait_workflow_execution(
+ workflow_execution_deleted = await wait_workflow_execution(
"incident-triggers-test-deleted"
)
assert len(workflow_manager.scheduler.workflows_to_run) == 0
@@ -874,6 +885,7 @@ def wait_workflow_execution(workflow_id):
assert workflow_execution_deleted.results["mock-action"] == [
'"deleted incident: incident"\n'
]
+ await workflow_manager.stop()
logs_counter = {}
@@ -918,7 +930,8 @@ def fake_workflow_adapter(
],
indirect=["test_app", "db_session"],
)
-def test_workflow_execution_logs(
+@pytest.mark.asyncio
+async def test_workflow_execution_logs(
db_session,
test_app,
create_alert,
@@ -953,8 +966,9 @@ def test_workflow_execution_logs(
fingerprint="fp1",
)
+ await workflow_manager.start()
# Insert the current alert into the workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
# Wait for the workflow execution to complete
workflow_execution = None
@@ -964,12 +978,14 @@ def test_workflow_execution_logs(
or workflow_execution.status == "in_progress"
and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, "susu-and-sons"
)
- time.sleep(1)
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
# Check if the workflow execution was successful
assert workflow_execution is not None
assert workflow_execution.status == "success"
@@ -990,7 +1006,8 @@ def test_workflow_execution_logs(
],
indirect=["test_app", "db_session"],
)
-def test_workflow_execution_logs_log_level_debug_console_provider(
+@pytest.mark.asyncio
+async def test_workflow_execution_logs_log_level_debug_console_provider(
db_session,
test_app,
create_alert,
@@ -1034,7 +1051,8 @@ def test_workflow_execution_logs_log_level_debug_console_provider(
)
# Insert the current alert into the workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.start()
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
# Wait for the workflow execution to complete
workflow_execution = None
@@ -1045,12 +1063,14 @@ def test_workflow_execution_logs_log_level_debug_console_provider(
or workflow_execution.status == "in_progress"
and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, "susu-and-sons"
)
- time.sleep(1)
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
# Check if the workflow execution was successful
assert workflow_execution is not None
assert workflow_execution.status == "success"
@@ -1238,7 +1258,8 @@ def test_workflow_execution_logs_log_level_debug_console_provider(
],
indirect=["test_app", "db_session"],
)
-def test_alert_routing_policy(
+@pytest.mark.asyncio
+async def test_alert_routing_policy(
db_session,
test_app,
workflow_manager,
@@ -1278,29 +1299,31 @@ def test_alert_routing_policy(
monitor_name=alert_data["monitor_name"],
)
+ await workflow_manager.start()
# Insert the alert into workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
# Wait for workflow execution
workflow_execution = None
count = 0
+ found = False
while (
- workflow_execution is None
- or workflow_execution.status == "in_progress"
- and count < 30
+ not found and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, "alert-routing-policy"
)
if workflow_execution is not None and workflow_execution.status == "success":
- break
- time.sleep(1)
+ found = True
+ await asyncio.sleep(1)
count += 1
# Verify workflow execution
assert workflow_execution is not None
assert workflow_execution.status == "success"
+ await workflow_manager.stop()
+
# Check if the actions were triggered as expected
for action_name, expected_messages in expected_results.items():
if not expected_messages:
@@ -1424,7 +1447,8 @@ def test_alert_routing_policy(
],
indirect=["test_app", "db_session"],
)
-def test_nested_conditional_flow(
+@pytest.mark.asyncio
+async def test_nested_conditional_flow(
db_session,
test_app,
workflow_manager,
@@ -1459,7 +1483,8 @@ def test_nested_conditional_flow(
)
# Insert the alert into workflow manager
- workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+ await workflow_manager.start()
+ await workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
# Wait for workflow execution
workflow_execution = None
@@ -1469,7 +1494,7 @@ def test_nested_conditional_flow(
or workflow_execution.status == "in_progress"
and count < 30
):
- workflow_execution = get_last_workflow_execution_by_workflow_id(
+ workflow_execution = await get_last_workflow_execution_by_workflow_id(
SINGLE_TENANT_UUID, "nested-conditional-flow"
)
if workflow_execution is not None and workflow_execution.status == "success":
@@ -1478,9 +1503,11 @@ def test_nested_conditional_flow(
elif workflow_execution is not None and workflow_execution.status == "error":
raise Exception("Workflow execution failed")
- time.sleep(1)
+ await asyncio.sleep(1)
count += 1
+ await workflow_manager.stop()
+
# Verify workflow execution
assert workflow_execution is not None
assert workflow_execution.status == "success"
diff --git a/tests/test_workflowmanager.py b/tests/test_workflowmanager.py
index eb43abe15..02e928928 100644
--- a/tests/test_workflowmanager.py
+++ b/tests/test_workflowmanager.py
@@ -1,3 +1,5 @@
+import asyncio
+import pytest
import queue
from pathlib import Path
from unittest.mock import Mock, patch
@@ -26,9 +28,9 @@ def test_get_workflow_from_dict():
tenant_id = "test_tenant"
workflow_path = str(path_to_test_resources / "db_disk_space_for_testing.yml")
workflow_dict = workflow_store._parse_workflow_to_dict(workflow_path=workflow_path)
- result = workflow_store.get_workflow_from_dict(
+ result = asyncio.run(workflow_store.get_workflow_from_dict(
tenant_id=tenant_id, workflow=workflow_dict
- )
+ ))
mock_parser.parse.assert_called_once_with(tenant_id, workflow_dict)
assert result.id == "workflow1"
@@ -45,9 +47,9 @@ def test_get_workflow_from_dict_raises_exception():
workflow_dict = workflow_store._parse_workflow_to_dict(workflow_path=workflow_path)
with pytest.raises(HTTPException) as exc_info:
- workflow_store.get_workflow_from_dict(
+ asyncio.run(workflow_store.get_workflow_from_dict(
tenant_id=tenant_id, workflow=workflow_dict
- )
+ ))
assert exc_info.value.status_code == 500
assert exc_info.value.detail == "Unable to parse workflow from dict"