diff --git a/README.md b/README.md
index ad5b71199..e5a532806 100644
--- a/README.md
+++ b/README.md
@@ -122,12 +122,19 @@ Workflow triggers can either be executed manually when an alert is activated or
-
+
+
+
+
+
+
+
+
Databases and data warehouses
diff --git a/docs/api-ref/mapping/create-mapping.mdx b/docs/api-ref/mapping/create-mapping.mdx
new file mode 100644
index 000000000..6994f6aab
--- /dev/null
+++ b/docs/api-ref/mapping/create-mapping.mdx
@@ -0,0 +1,3 @@
+---
+openapi: post /mapping
+---
\ No newline at end of file
diff --git a/docs/api-ref/mapping/delete-mapping-by-id.mdx b/docs/api-ref/mapping/delete-mapping-by-id.mdx
new file mode 100644
index 000000000..52645c5dd
--- /dev/null
+++ b/docs/api-ref/mapping/delete-mapping-by-id.mdx
@@ -0,0 +1,3 @@
+---
+openapi: delete /mapping/{mapping_id}
+---
\ No newline at end of file
diff --git a/docs/api-ref/mapping/get-mappings.mdx b/docs/api-ref/mapping/get-mappings.mdx
new file mode 100644
index 000000000..b1ac11c0b
--- /dev/null
+++ b/docs/api-ref/mapping/get-mappings.mdx
@@ -0,0 +1,3 @@
+---
+openapi: get /mapping
+---
\ No newline at end of file
diff --git a/docs/cli/commands/cli-config-new.mdx b/docs/cli/commands/cli-config-new.mdx
new file mode 100644
index 000000000..cee753e18
--- /dev/null
+++ b/docs/cli/commands/cli-config-new.mdx
@@ -0,0 +1,56 @@
+---
+sidebarTitle: "keep config new"
+---
+
+Create new config.
+
+## Usage
+
+```
+Usage: keep config new [OPTIONS]...
+```
+
+## Options
+* `interactive`:
+ * Type: BOOL
+ * Default: `True`
+ * Usage: `--interactive`
+
+ Create config interactively.
+
+* `url`:
+ * Type: STRING
+ * Default: `http://localhost:8080`
+ * Usage: `--url`
+
+ The URL of the Keep backend server.
+
+* `api-key`:
+ * Type: STRING
+ * Default: ``
+ * Usage: `--api-key`
+
+  The API key for authenticating with Keep.
+
+* `help`:
+ * Type: BOOL
+ * Default: `false`
+ * Usage: `--help`
+
+ Show this message and exit.
+
+
+
+## CLI Help
+
+```
+Usage: keep config new [OPTIONS]
+
+ create new config.
+
+Options:
+ -u, --url TEXT The url of the keep api
+ -a, --api-key TEXT The api key for keep
+ -i, --interactive Interactive mode creating keep config (default True)
+ --help Show this message and exit.
+```
diff --git a/docs/cli/commands/cli-config-provider.mdx b/docs/cli/commands/cli-config-provider.mdx
deleted file mode 100644
index dc4d8ba1d..000000000
--- a/docs/cli/commands/cli-config-provider.mdx
+++ /dev/null
@@ -1,68 +0,0 @@
----
-title: "keep config provider"
-sidebarTitle: "keep config provider"
----
-
-Set the provider configuration.
-
-## Usage
-
-```
-Usage: keep config provider [OPTIONS]
-```
-
-## Options
-* `provider_type` (REQUIRED):
- * Type: STRING
- * Default: `none`
- * Usage: `--provider-type
--p`
-
- The provider to configure [e.g. elastic]
-
-
-* `provider_id` (REQUIRED):
- * Type: STRING
- * Default: `none`
- * Usage: `--provider-id
--i`
-
- The provider unique identifier [e.g. elastic-prod]
-
-
-* `provider_config_file`:
- * Type: STRING
- * Default: `providers.yaml`
- * Usage: `--provider-config-file
--c`
-
- The provider config
-
-
-* `help`:
- * Type: BOOL
- * Default: `false`
- * Usage: `--help`
-
- Show this message and exit.
-
-
-
-## CLI Help
-
-```
-Usage: keep config provider [OPTIONS]
-
- Set the provider configuration.
-
-Options:
- -p, --provider-type TEXT The provider to configure [e.g. elastic]
- [required]
-
- -i, --provider-id TEXT The provider unique identifier [e.g.
- elastic-prod] [required]
-
- -c, --provider-config-file TEXT
- The provider config
- --help Show this message and exit.
-```
diff --git a/docs/cli/commands/cli-config-show.mdx b/docs/cli/commands/cli-config-show.mdx
new file mode 100644
index 000000000..a217484b6
--- /dev/null
+++ b/docs/cli/commands/cli-config-show.mdx
@@ -0,0 +1,32 @@
+---
+sidebarTitle: "keep config show"
+---
+
+Show keep configuration.
+
+## Usage
+
+```
+Usage: keep config show [OPTIONS]...
+```
+
+## Options
+* `help`:
+ * Type: BOOL
+ * Default: `false`
+ * Usage: `--help`
+
+ Show this message and exit.
+
+
+
+## CLI Help
+
+```
+Usage: keep config show [OPTIONS]
+
+ show the current config.
+
+Options:
+ --help Show this message and exit.
+```
diff --git a/docs/cli/commands/cli-config.mdx b/docs/cli/commands/cli-config.mdx
index 485611984..c5c7d8cf9 100644
--- a/docs/cli/commands/cli-config.mdx
+++ b/docs/cli/commands/cli-config.mdx
@@ -26,11 +26,12 @@ Usage: keep config [OPTIONS] COMMAND [ARGS]...
```
Usage: keep config [OPTIONS] COMMAND [ARGS]...
- Set keep configuration.
+ Manage the config.
Options:
--help Show this message and exit.
Commands:
- provider Set the provider configuration.
+ new create new config.
+ show show the current config.
```
diff --git a/docs/cli/commands/mappings-create.mdx b/docs/cli/commands/mappings-create.mdx
new file mode 100644
index 000000000..7cb711795
--- /dev/null
+++ b/docs/cli/commands/mappings-create.mdx
@@ -0,0 +1,75 @@
+---
+sidebarTitle: "keep mappings create"
+---
+
+Create a mapping rule.
+
+## Usage
+
+```
+Usage: keep mappings create [OPTIONS]
+```
+
+## Options
+
+* `name`
+ * Type: STRING
+ * Default: ``
+ * Usage: `--name `
+
+ The name of the mapping.
+
+* `description`
+ * Type: STRING
+ * Default: ``
+ * Usage: `--description `
+
+ The description of the mapping.
+
+* `file`
+ * Type: STRING
+ * Default: ``
+ * Usage: `--file `
+
+ The mapping file. Must be a CSV file.
+
+* `matchers`
+ * Type: STRING
+ * Default: ``
+ * Usage: `--matchers `
+
+ The matchers of the mapping, as a comma-separated list of strings.
+
+* `priority`
+ * Type: INTEGER RANGE
+ * Default: `0`
+ * Usage: `--priority `
+
+ The priority of the mapping, higher priority means this rule will execute first. [0<=x<=100].
+
+* `help`:
+ * Type: BOOL
+ * Default: `false`
+ * Usage: `--help`
+
+ Show this message and exit.
+
+## CLI Help
+
+```
+Usage: keep mappings create [OPTIONS]
+
+ Create a mapping rule.
+
+Options:
+ -n, --name TEXT The name of the mapping. [required]
+ -d, --description TEXT The description of the mapping.
+ -f, --file PATH The mapping file. Must be a CSV file.
+ [required]
+ -m, --matchers TEXT The matchers of the mapping, as a comma-
+ separated list of strings. [required]
+ -p, --priority INTEGER RANGE The priority of the mapping, higher priority
+ means this rule will execute first.
+ [0<=x<=100]
+ --help Show this message and exit.
+```
diff --git a/docs/cli/commands/mappings-delete.mdx b/docs/cli/commands/mappings-delete.mdx
new file mode 100644
index 000000000..fc54d57d7
--- /dev/null
+++ b/docs/cli/commands/mappings-delete.mdx
@@ -0,0 +1,41 @@
+---
+sidebarTitle: "keep mappings delete"
+---
+
+Delete a mapping with a specified ID.
+
+## Usage
+
+```
+Usage: keep mappings delete [OPTIONS]
+```
+
+## Options
+
+* `mapping-id`
+  * Type: INTEGER
+ * Default: ``
+ * Usage: `--mapping-id `
+
+ The ID of the mapping to delete.
+
+* `help`:
+ * Type: BOOL
+ * Default: `false`
+ * Usage: `--help`
+
+ Show this message and exit.
+
+
+
+## CLI Help
+
+```
+Usage: keep mappings delete [OPTIONS]
+
+ Delete a mapping with a specified ID
+
+Options:
+ --mapping-id INTEGER The ID of the mapping to delete. [required]
+ --help Show this message and exit.
+```
diff --git a/docs/cli/commands/mappings-list.mdx b/docs/cli/commands/mappings-list.mdx
new file mode 100644
index 000000000..79558047d
--- /dev/null
+++ b/docs/cli/commands/mappings-list.mdx
@@ -0,0 +1,33 @@
+---
+sidebarTitle: "keep mappings list"
+---
+
+List mappings.
+
+## Usage
+
+```
+Usage: keep mappings list [OPTIONS]
+```
+
+List mappings.
+
+## Options
+
+* `help`:
+ * Type: BOOL
+ * Default: `false`
+ * Usage: `--help`
+
+ Show this message and exit.
+
+## CLI Help
+
+```
+Usage: keep mappings list [OPTIONS]
+
+ List mappings.
+
+Options:
+ --help Show this message and exit.
+```
diff --git a/docs/cli/installation.mdx b/docs/cli/installation.mdx
index 469c46178..3020dd471 100644
--- a/docs/cli/installation.mdx
+++ b/docs/cli/installation.mdx
@@ -2,6 +2,14 @@
title: "Installation"
---
Missing an installation? submit a new installation request and we will add it as soon as we can.
+
+
+We recommend installing the Keep CLI with Python 3.11 for optimal compatibility and performance.
+This choice ensures seamless integration with all dependencies, including pyarrow, which currently does not support Python 3.12.
+
+
+Need Keep CLI on other versions? Feel free to contact us!
+
## Clone and install (Option 1)
### Install
diff --git a/docs/deployment/ecs.mdx b/docs/deployment/ecs.mdx
new file mode 100644
index 000000000..097092e1a
--- /dev/null
+++ b/docs/deployment/ecs.mdx
@@ -0,0 +1,154 @@
+---
+title: "AWS ECS"
+sidebarTitle: "AWS ECS"
+---
+
+## Step 1: Login to AWS Console
+- Open your web browser and navigate to the AWS Management Console.
+- Log in using your AWS account credentials.
+
+## Step 2: Navigate to ECS
+- Click on the "Services" dropdown menu in the top left corner.
+- Select "ECS" from the list of services.
+
+## Step 3: Create 3 Task Definitions
+- In the ECS dashboard, navigate to the "Task Definitions" section in the left sidebar.
+
+- Click on "Create new Task Definition".
+ ![Create new task definition](/images/ecs-task-def-create-new.png)
+
+ ### Task Definition 1 (Frontend - KeepUI):
+
+ - Task Definition Family: keep-frontend
+ ![Task Definition Family](/images/ecs-task-def-frontend1.png)
+ - Configure your container definitions as below:
+ - Infrastructure Requirements:
+ - Launch Type: AWS Fargate
+ - OS, Architecture, Network mode: Linux/X86_64
+ - Task Size:
+ - CPU: 1 vCPU
+ - Memory: 2 GB
+ - Task Role and Task Execution Role are optional if you plan on using secrets manager for example then create a task execution role to allow access to the secret manager you created.
+ ![Infrastructure Requirements](/images/ecs-task-def-frontend2.png)
+ - Container Details:
+ - Name: keep-frontend
+      - Image URI: us-central1-docker.pkg.dev/keephq/keep/keep-ui:latest
+ - Ports Mapping:
+ - Container Port: 3000
+ - Protocol: TCP
+ ![Container Details](/images/ecs-task-def-frontend3.png)
+ - Environment Variables: (This can be static or you can use parameter store or secrets manager)
+ - DATABASE_CONNECTION_STRING
+ - AUTH_TYPE
+ - KEEP_JWT_SECRET
+ - KEEP_DEFAULT_USERNAME
+ - KEEP_DEFAULT_PASSWORD
+ - SECRET_MANAGER_TYPE
+ - SECRET_MANAGER_DIRECTORY
+ - USE_NGROK
+ - KEEP_API_URL
+ (The below variable is optional if you don't want to use websocket)
+ - PUSHER_DISABLED
+ (The below variables are optional if you want to use websocket)
+ - PUSHER_APP_ID
+ - PUSHER_APP_KEY
+ - PUSHER_APP_SECRET
+ - PUSHER_HOST
+ - PUSHER_PORT
+ ![Environment Variables](/images/ecs-task-def-frontend4.png)
+ - Review and create your task definition.
+
+ ### Task Definition 2 (Backend - keepAPI):
+
+ - Configure your container definitions as below:
+    - Task Definition Family: keep-backend
+ ![Task Definition Family](/images/ecs-task-def-backend1.png)
+ - Infrastructure Requirements:
+ - Launch Type: AWS Fargate
+ - OS, Architecture, Network mode: Linux/X86_64
+ - Task Size:
+ - CPU: 1 vCPU
+ - Memory: 2 GB
+ - Task Role and Task Execution Role are optional if you plan on using secrets manager for example then create a task execution role to allow access to the secret manager you created.
+ ![Infrastructure Requirements](/images/ecs-task-def-backend2.png)
+ - Container Details:
+ - Name: keep-backend
+ - Image URI: us-central1-docker.pkg.dev/keephq/keep/keep-api:latest
+ - Ports Mapping:
+ - Container Port: 8080
+ - Protocol: TCP
+ ![Container Details](/images/ecs-task-def-backend3.png)
+ - Environment Variables: (This can be static or you can use parameter store or secrets manager)
+ - DATABASE_CONNECTION_STRING
+ - AUTH_TYPE
+ - KEEP_JWT_SECRET
+ - KEEP_DEFAULT_USERNAME
+ - KEEP_DEFAULT_PASSWORD
+ - SECRET_MANAGER_TYPE
+ - SECRET_MANAGER_DIRECTORY
+ - USE_NGROK
+ - KEEP_API_URL
+ (The below variable is optional if you don't want to use websocket)
+ - PUSHER_DISABLED
+ (The below variables are optional if you want to use websocket)
+ - PUSHER_APP_ID
+ - PUSHER_APP_KEY
+ - PUSHER_APP_SECRET
+ - PUSHER_HOST
+ - PUSHER_PORT
+ ![Environment Variables](/images/ecs-task-def-backend4.png)
+ - Storage:
+ - Volume Name: keep-efs
+ - Configuration Type: Configure at task definition creation
+ - Volume type: EFS
+ - Storage configurations:
+        - File system ID: Select an existing EFS filesystem or create a new one
+ - Root Directory: /
+ ![Volume Configuration](/images/ecs-task-def-backend5.png)
+ - Container mount points:
+ - Container: select the container you just created
+ - Source volume: keep-efs
+ - Container path: /app
+ - Make sure that Readonly is not selected
+ ![Container Mount](/images/ecs-task-def-backend6.png)
+ - Review and create your task definition.
+
+ ### Task Definition 3 (Websocket): (This step is optional if you want to have automatic refresh of the alerts feed)
+
+ - Configure your container definitions as below:
+    - Task Definition Family: keep-websocket
+ ![Task Definition Family](/images/ecs-task-def-websocket1.png)
+ - Infrastructure Requirements:
+ - Launch Type: AWS Fargate
+ - OS, Architecture, Network mode: Linux/X86_64
+ - Task Size:
+ - CPU: 0.25 vCPU
+ - Memory: 1 GB
+ - Task Role and Task Execution Role are optional if you plan on using secrets manager for example then create a task execution role to allow access to the secret manager you created.
+ ![Infrastructure Requirements](/images/ecs-task-def-websocket2.png)
+ - Container Details:
+ - Name: keep-websocket
+ - Image URI: quay.io/soketi/soketi:1.4-16-debian
+ - Ports Mapping:
+ - Container Port: 6001
+ - Protocol: TCP
+ ![Container Details](/images/ecs-task-def-websocket3.png)
+ - Environment Variables: (This can be static or you can use parameter store or secrets manager)
+ - SOKETI_DEBUG
+ - SOKETI_DEFAULT_APP_ID
+ - SOKETI_DEFAULT_APP_KEY
+ - SOKETI_DEFAULT_APP_SECRET
+ - SOKETI_USER_AUTHENTICATION_TIMEOUT
+ ![Environment Variables](/images/ecs-task-def-websocket4.png)
+ - Review and create your task definition.
+
+## Step 4: Create Keep Service
+- In the ECS dashboard, navigate to the "Clusters" section in the left sidebar.
+- Select the cluster you want to deploy your service to.
+- Click on the "Create" button next to "Services".
+- Configure your service settings.
+- Review and create your service.
+
+## Step 5: Monitor Your Service
+- Once your service is created, monitor its status in the ECS dashboard.
+- You can view task status, service events, and other metrics to ensure your service is running correctly.
diff --git a/docs/images/azuremonitoring_1.png b/docs/images/azuremonitoring_1.png
new file mode 100644
index 000000000..b9636b27f
Binary files /dev/null and b/docs/images/azuremonitoring_1.png differ
diff --git a/docs/images/azuremonitoring_2.png b/docs/images/azuremonitoring_2.png
new file mode 100644
index 000000000..58b26d436
Binary files /dev/null and b/docs/images/azuremonitoring_2.png differ
diff --git a/docs/images/azuremonitoring_3.png b/docs/images/azuremonitoring_3.png
new file mode 100644
index 000000000..c76740efd
Binary files /dev/null and b/docs/images/azuremonitoring_3.png differ
diff --git a/docs/images/azuremonitoring_4.png b/docs/images/azuremonitoring_4.png
new file mode 100644
index 000000000..b068c8a81
Binary files /dev/null and b/docs/images/azuremonitoring_4.png differ
diff --git a/docs/images/azuremonitoring_5.png b/docs/images/azuremonitoring_5.png
new file mode 100644
index 000000000..0935013c6
Binary files /dev/null and b/docs/images/azuremonitoring_5.png differ
diff --git a/docs/images/azuremonitoring_6.png b/docs/images/azuremonitoring_6.png
new file mode 100644
index 000000000..3ee7ab07b
Binary files /dev/null and b/docs/images/azuremonitoring_6.png differ
diff --git a/docs/images/azuremonitoring_7.png b/docs/images/azuremonitoring_7.png
new file mode 100644
index 000000000..546a5a327
Binary files /dev/null and b/docs/images/azuremonitoring_7.png differ
diff --git a/docs/images/ecs-task-def-backend1.png b/docs/images/ecs-task-def-backend1.png
new file mode 100644
index 000000000..cd79c1429
Binary files /dev/null and b/docs/images/ecs-task-def-backend1.png differ
diff --git a/docs/images/ecs-task-def-backend2.png b/docs/images/ecs-task-def-backend2.png
new file mode 100644
index 000000000..e6ff04309
Binary files /dev/null and b/docs/images/ecs-task-def-backend2.png differ
diff --git a/docs/images/ecs-task-def-backend3.png b/docs/images/ecs-task-def-backend3.png
new file mode 100644
index 000000000..917fd2f1a
Binary files /dev/null and b/docs/images/ecs-task-def-backend3.png differ
diff --git a/docs/images/ecs-task-def-backend4.png b/docs/images/ecs-task-def-backend4.png
new file mode 100644
index 000000000..ba7b8750b
Binary files /dev/null and b/docs/images/ecs-task-def-backend4.png differ
diff --git a/docs/images/ecs-task-def-backend5.png b/docs/images/ecs-task-def-backend5.png
new file mode 100644
index 000000000..eaef8d56c
Binary files /dev/null and b/docs/images/ecs-task-def-backend5.png differ
diff --git a/docs/images/ecs-task-def-backend6.png b/docs/images/ecs-task-def-backend6.png
new file mode 100644
index 000000000..e47b91ca7
Binary files /dev/null and b/docs/images/ecs-task-def-backend6.png differ
diff --git a/docs/images/ecs-task-def-create-new.png b/docs/images/ecs-task-def-create-new.png
new file mode 100644
index 000000000..9cfd0904a
Binary files /dev/null and b/docs/images/ecs-task-def-create-new.png differ
diff --git a/docs/images/ecs-task-def-create.png b/docs/images/ecs-task-def-create.png
new file mode 100644
index 000000000..ef309433b
Binary files /dev/null and b/docs/images/ecs-task-def-create.png differ
diff --git a/docs/images/ecs-task-def-frontend1.png b/docs/images/ecs-task-def-frontend1.png
new file mode 100644
index 000000000..2744431c4
Binary files /dev/null and b/docs/images/ecs-task-def-frontend1.png differ
diff --git a/docs/images/ecs-task-def-frontend2.png b/docs/images/ecs-task-def-frontend2.png
new file mode 100644
index 000000000..135d8b8ea
Binary files /dev/null and b/docs/images/ecs-task-def-frontend2.png differ
diff --git a/docs/images/ecs-task-def-frontend3.png b/docs/images/ecs-task-def-frontend3.png
new file mode 100644
index 000000000..8500e73b4
Binary files /dev/null and b/docs/images/ecs-task-def-frontend3.png differ
diff --git a/docs/images/ecs-task-def-frontend4.png b/docs/images/ecs-task-def-frontend4.png
new file mode 100644
index 000000000..3402d3d66
Binary files /dev/null and b/docs/images/ecs-task-def-frontend4.png differ
diff --git a/docs/images/ecs-task-def-websocket1.png b/docs/images/ecs-task-def-websocket1.png
new file mode 100644
index 000000000..3e0fd65c9
Binary files /dev/null and b/docs/images/ecs-task-def-websocket1.png differ
diff --git a/docs/images/ecs-task-def-websocket2.png b/docs/images/ecs-task-def-websocket2.png
new file mode 100644
index 000000000..daaf65566
Binary files /dev/null and b/docs/images/ecs-task-def-websocket2.png differ
diff --git a/docs/images/ecs-task-def-websocket3.png b/docs/images/ecs-task-def-websocket3.png
new file mode 100644
index 000000000..a1c4e32aa
Binary files /dev/null and b/docs/images/ecs-task-def-websocket3.png differ
diff --git a/docs/images/ecs-task-def-websocket4.png b/docs/images/ecs-task-def-websocket4.png
new file mode 100644
index 000000000..84fb54e1b
Binary files /dev/null and b/docs/images/ecs-task-def-websocket4.png differ
diff --git a/docs/images/gcpmonitoring_1.png b/docs/images/gcpmonitoring_1.png
new file mode 100644
index 000000000..4bd027e11
Binary files /dev/null and b/docs/images/gcpmonitoring_1.png differ
diff --git a/docs/images/gcpmonitoring_2.png b/docs/images/gcpmonitoring_2.png
new file mode 100644
index 000000000..561213c40
Binary files /dev/null and b/docs/images/gcpmonitoring_2.png differ
diff --git a/docs/images/gcpmonitoring_3.png b/docs/images/gcpmonitoring_3.png
new file mode 100644
index 000000000..f56b62400
Binary files /dev/null and b/docs/images/gcpmonitoring_3.png differ
diff --git a/docs/images/gcpmonitoring_4.png b/docs/images/gcpmonitoring_4.png
new file mode 100644
index 000000000..b9bf21d51
Binary files /dev/null and b/docs/images/gcpmonitoring_4.png differ
diff --git a/docs/images/gcpmonitoring_5.png b/docs/images/gcpmonitoring_5.png
new file mode 100644
index 000000000..dc984d0bc
Binary files /dev/null and b/docs/images/gcpmonitoring_5.png differ
diff --git a/docs/images/gcpmonitoring_6.png b/docs/images/gcpmonitoring_6.png
new file mode 100644
index 000000000..25dcbea49
Binary files /dev/null and b/docs/images/gcpmonitoring_6.png differ
diff --git a/docs/images/presets/convert-to-cel.png b/docs/images/presets/convert-to-cel.png
new file mode 100644
index 000000000..3d15417a1
Binary files /dev/null and b/docs/images/presets/convert-to-cel.png differ
diff --git a/docs/images/presets/converted-sql-to-cel.png b/docs/images/presets/converted-sql-to-cel.png
new file mode 100644
index 000000000..15d54b213
Binary files /dev/null and b/docs/images/presets/converted-sql-to-cel.png differ
diff --git a/docs/images/presets/import-from-sql.png b/docs/images/presets/import-from-sql.png
new file mode 100644
index 000000000..7ac2002b5
Binary files /dev/null and b/docs/images/presets/import-from-sql.png differ
diff --git a/docs/images/presets/invalid-sentry-cel.png b/docs/images/presets/invalid-sentry-cel.png
new file mode 100644
index 000000000..5803ac556
Binary files /dev/null and b/docs/images/presets/invalid-sentry-cel.png differ
diff --git a/docs/images/presets/preset-created.png b/docs/images/presets/preset-created.png
new file mode 100644
index 000000000..5a053239d
Binary files /dev/null and b/docs/images/presets/preset-created.png differ
diff --git a/docs/images/presets/save-preset-modal.png b/docs/images/presets/save-preset-modal.png
new file mode 100644
index 000000000..6e85c1498
Binary files /dev/null and b/docs/images/presets/save-preset-modal.png differ
diff --git a/docs/images/presets/save-preset.png b/docs/images/presets/save-preset.png
new file mode 100644
index 000000000..6a83825a1
Binary files /dev/null and b/docs/images/presets/save-preset.png differ
diff --git a/docs/images/presets/valid-sentry-cel.png b/docs/images/presets/valid-sentry-cel.png
new file mode 100644
index 000000000..6e0e9a9a9
Binary files /dev/null and b/docs/images/presets/valid-sentry-cel.png differ
diff --git a/docs/mint.json b/docs/mint.json
index 877f3f58a..3268264a7 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -30,6 +30,7 @@
"overview/keyconcepts",
"overview/usecases",
"overview/ruleengine",
+ "overview/presets",
{
"group": "Enrichments",
"pages": ["overview/enrichment/mapping"]
@@ -53,7 +54,8 @@
"deployment/secret-manager",
"deployment/docker",
"deployment/kubernetes",
- "deployment/openshift"
+ "deployment/openshift",
+ "deployment/ecs"
]
},
{
@@ -79,6 +81,7 @@
"pages": [
"providers/documentation/aks-provider",
"providers/documentation/axiom-provider",
+ "providers/documentation/azuremonitoring-provider",
"providers/documentation/cloudwatch-logs",
"providers/documentation/cloudwatch-metrics",
"providers/documentation/console-provider",
@@ -87,6 +90,7 @@
"providers/documentation/kibana-provider",
"providers/documentation/discord-provider",
"providers/documentation/elastic-provider",
+ "providers/documentation/gcpmonitoring-provider",
"providers/documentation/grafana-provider",
"providers/documentation/grafana-oncall-provider",
"providers/documentation/http-provider",
@@ -255,6 +259,14 @@
}
]
},
+ {
+ "group": "keep mappings",
+ "pages": [
+ "cli/commands/mappings-list",
+ "cli/commands/mappings-create",
+ "cli/commands/mappings-delete"
+ ]
+ },
"cli/commands/cli-api",
"cli/commands/cli-config",
"cli/commands/cli-version",
diff --git a/docs/overview/presets.mdx b/docs/overview/presets.mdx
new file mode 100644
index 000000000..3480ef040
--- /dev/null
+++ b/docs/overview/presets.mdx
@@ -0,0 +1,81 @@
+---
+description: "CEL-Based Alert Filtering"
+title: "Presets"
+---
+
+With Keep's introduction of CEL (Common Expression Language) for alert filtering, users gain the flexibility to define more complex and precise alert filtering logic. This feature allows the creation of customizable filters using CEL expressions to refine alert visibility based on specific criteria.
+
+## Introduction
+
+CEL-based filtering offers a powerful method for users to specify conditions under which alerts should be shown. Through a combination of logical, comparison, and string operations, alerts can be filtered to meet the exact needs of the user, improving the focus and efficiency of alert management.
+
+## How It Works
+
+1. **CEL Expression Creation**: Users craft CEL expressions that define the filtering criteria for alerts.
+2. **Preset Definition**: These expressions can be saved as presets for easy application to different alert streams.
+3. **Alert Filtering**: When applied, the CEL expressions evaluate each alert against the defined criteria, filtering the alert stream in real-time.
+
+## Practical Example
+
+For instance, a user could create a CEL expression to filter alerts by severity and source, such as `severity == 'critical' && service.contains('database')`, ensuring only critical alerts from database services are displayed.
+
+## Core Concepts
+
+- **CEL Expressions**: The CEL language syntax used to define alert filtering logic.
+- **Presets**: Saved CEL expressions that can be reused across different alert streams.
+- **Real-Time Filtering**: The dynamic application of CEL expressions to incoming alerts.
+
+## Creating a CEL Expression
+
+There are generally two ways of creating a CEL expression in Keep:
+### Importing from an SQL query
+
+1. Click on the "Import from SQL" button
+
+
+
+2. Write/Paste your SQL query and hit the "Convert to CEL" button
+
+
+
+Which in turn will generate and apply a valid CEL query:
+
+
+
+
+### Manually creating CEL query
+
+Use the [CEL Language Definition](https://github.com/google/cel-spec/blob/master/doc/langdef.md) documentation to better understand the capabilities of the Common Expression Language.
+This is an example of how to query all the alerts that came from `Sentry`:
+
+
+
+If the CEL syntax you typed in is invalid, an error message will show up (in this case, we used invalid `''` instead of `""`):
+
+
+
+
+## Save Presets
+
+You can save your CEL queries into a `Preset` using the "Save current filter as a view" button
+
+
+
+You can name your `Preset` and configure whether it is "Private" (only the creating user will see this Preset) or account-wide available.
+
+
+
+The `Preset` will then be created and available for you to quickly navigate to and use.
+
+
+
+
+## Best Practices
+
+- **Specificity in Expressions**: Craft expressions that precisely target the desired alerts to avoid filtering out relevant alerts.
+- **Presets Management**: Regularly review and update your presets to align with evolving alerting needs.
+- **Testing Expressions**: Before applying, test CEL expressions to ensure they correctly filter the desired alerts.
+
+## Useful Links
+- [Common Expression Language](https://github.com/google/cel-spec?tab=readme-ov-file)
+- [CEL Language Definition](https://github.com/google/cel-spec/blob/master/doc/langdef.md)
diff --git a/docs/providers/documentation/azuremonitoring-provider.mdx b/docs/providers/documentation/azuremonitoring-provider.mdx
new file mode 100644
index 000000000..f47a54be8
--- /dev/null
+++ b/docs/providers/documentation/azuremonitoring-provider.mdx
@@ -0,0 +1,78 @@
+---
+title: "Azure Monitoring"
+sidebarTitle: "Azure Monitoring Provider"
+description: "Azure Monitoring provider allows you to get alerts from Azure Monitoring via webhooks."
+---
+
+## Overview
+
+The Azure Monitoring Provider integrates Keep with Azure Monitoring, allowing you to receive alerts within Keep's platform. By setting up a webhook in Azure, you can ensure that critical alerts are sent to Keep, allowing for efficient monitoring and response.
+
+## Connecting Azure Monitoring to Keep
+
+Connecting Azure Monitoring to Keep involves creating an Action Group in Azure, adding a webhook action, and configuring the Alert Rule to use the new Action Group.
+
+### Step 1: Navigate to Action Groups
+1. Log in to your Azure portal.
+2. Navigate to **Monitor** > **Alerts** > **Action groups**.
+
+
+
+
+
+### Step 2: Create new Action Group
+1. Click on **+ Create**.
+
+
+
+
+
+
+### Step 3: Fill Action Group details
+1. Choose the Subscription and Resource Group.
+2. Give the Action Group an indicative name.
+
+
+
+
+
+### Step 4: Go to "Action" and add Keep as a Webhook
+
+
+
+
+
+### Step 5: Test Keep Webhook action
+
+
+
+
+
+
+
+
+
+### Step 6: View the alert in Keep
+
+
+
+
+
+## Useful Links
+- [Azure Monitor alert webhook](https://learn.microsoft.com/en-us/azure/azure-monitor/alerts/alerts-webhooks)
+- [Azure Monitor alert payload](https://learn.microsoft.com/en-us/azure/azure-monitor/alerts/alerts-payload-samples)
+- [Azure Monitor action groups](https://learn.microsoft.com/en-us/azure/azure-monitor/alerts/action-groups)
diff --git a/docs/providers/documentation/gcpmonitoring-provider.mdx b/docs/providers/documentation/gcpmonitoring-provider.mdx
new file mode 100644
index 000000000..8a31e79c0
--- /dev/null
+++ b/docs/providers/documentation/gcpmonitoring-provider.mdx
@@ -0,0 +1,78 @@
+---
+title: "GCP Monitoring"
+sidebarTitle: "GCP Monitoring Provider"
+description: "GCP Monitoring provider allows you to get alerts from GCP Monitoring via webhooks."
+---
+
+## Overview
+The GCP Monitoring Provider enables seamless integration between Keep and GCP Monitoring, allowing alerts from GCP Monitoring to be directly sent to Keep through webhook configurations. This integration ensures that critical alerts are efficiently managed and responded to within Keep's platform.
+
+## Connecting GCP Monitoring to Keep
+To connect GCP Monitoring to Keep, you'll need to configure a webhook as a notification channel in GCP Monitoring and then link it to the desired alert policy.
+
+### Step 1: Access Notification Channels
+Log in to the Google Cloud Platform console.
+Navigate to **Monitoring > Alerting > Notification channels**.
+
+
+
+
+
+### Step 2: Add a New Webhook
+Within the Webhooks section, click on **ADD NEW**.
+
+
+
+
+
+### Step 3: Configure the Webhook
+In the Endpoint URL field, enter the webhook URL provided by Keep.
+- For Display Name, use keep-gcpmonitoring-webhook-integration.
+- Enable Use HTTP Basic Auth and input the following credentials:
+ - Auth Username: **api_key**
+ - Auth Password: **%YOURAPIKEY%**
+
+
+
+
+
+### Step 4: Save the Webhook Configuration
+- Click on Save to store the webhook configuration.
+
+### Step 5: Associate the Webhook with an Alert Policy
+
+Navigate to the alert policy from which you wish to send notifications to Keep.
+- Click on Edit.
+- Under "Notifications and name," find the Notification Channels section and select the keep-gcpmonitoring-webhook-integration channel you created.
+- Save the changes by clicking on SAVE POLICY.
+
+
+
+
+
+
+
+
+
+
+### Step 6: Review the alert in Keep
+
+
+
+
+
+### Useful Links
+ - [GCP Monitoring Notification Channels](https://cloud.google.com/monitoring/support/notification-options)
+ - [GCP Monitoring Alerting](https://cloud.google.com/monitoring/alerts)
diff --git a/docs/providers/documentation/sentry-provider.mdx b/docs/providers/documentation/sentry-provider.mdx
index 09bfd7575..cf1f80fcd 100644
--- a/docs/providers/documentation/sentry-provider.mdx
+++ b/docs/providers/documentation/sentry-provider.mdx
@@ -15,6 +15,10 @@ The `api_key` and `organization_slug` are required for connecting to the Sentry
`project_slug` is if you want to connect Sentry to a specific project within an organization.
+
+To connect a self-hosted Sentry instance, you need to set the `api_url` parameter. The default value is `https://sentry.io/api/0/`.
+
+
## Connecting with the Provider
### API Key
diff --git a/docs/providers/documentation/splunk-provider.mdx b/docs/providers/documentation/splunk-provider.mdx
new file mode 100644
index 000000000..88956cd8b
--- /dev/null
+++ b/docs/providers/documentation/splunk-provider.mdx
@@ -0,0 +1,37 @@
+---
+title: "Splunk"
+sidebarTitle: "Splunk Provider"
+description: "Splunk provider allows you to get Splunk `saved searches` via webhook installation"
+---
+
+## Authentication Parameters
+The Splunk provider requires the following authentication parameters:
+
+- `Splunk API Key`: Required. This is the API token used to authenticate requests to the Splunk platform.
+- `Host`: This is the hostname or IP address of the Splunk instance you wish to connect to. It identifies the Splunk server that the API will interact with.
+- `Port`: This is the network port on the Splunk server that is listening for API connections. The default port for Splunk's management API is typically 8089.
+
+
+## Connecting with the Provider
+
+Obtain Splunk API Token:
+1. Ensure you have a Splunk account with the necessary [permissions](https://docs.splunk.com/Documentation/Splunk/9.2.0/Security/Rolesandcapabilities). The basic permissions required are `list_all_objects` & `edit_own_objects`.
+2. Get an API token for authenticating API requests. [Read More](https://docs.splunk.com/Documentation/Splunk/9.2.0/Security/Setupauthenticationwithtokens) on how to set up and get API Keys.
+
+Identify Your Splunk Instance Details:
+1. Determine the Host (IP address or hostname) and Port (default is 8089 for Splunk's management API) of the Splunk instance you wish to connect to.
+
+---
+**NOTE**
+Make sure to follow this [Guide](https://docs.splunk.com/Documentation/Splunk/9.2.0/Alert/ConfigureWebhookAllowList) to configure your webhook allow list to allow your `keep` deployment.
+---
+
+
+## Useful Links
+
+- [Splunk Python SDK](https://dev.splunk.com/view/python-sdk/SP-CAAAEBB)
+- [Splunk Webhook](https://docs.splunk.com/Documentation/Splunk/9.2.0/Alert/Webhooks)
+- [Splunk Webhook Allow List](https://docs.splunk.com/Documentation/Splunk/9.2.0/Alert/ConfigureWebhookAllowList)
+- [Splunk Permissions and Roles](https://docs.splunk.com/Documentation/Splunk/9.2.0/Security/Rolesandcapabilities)
+- [Splunk API tokens](https://docs.splunk.com/Documentation/Splunk/9.2.0/Security/Setupauthenticationwithtokens)
+
diff --git a/docs/providers/documentation/squadcast-provider.mdx b/docs/providers/documentation/squadcast-provider.mdx
index 35b7190c9..26cc7e2c6 100644
--- a/docs/providers/documentation/squadcast-provider.mdx
+++ b/docs/providers/documentation/squadcast-provider.mdx
@@ -15,6 +15,7 @@ The `notify` function take following parameters as inputs:
- `priority` (optional): Priority of the incident.
- `status` (optional): Status of the event.
- `event_id` (optional): event_id is used to resolve an incident
+ - `additional_json` (optional): Additional JSON data to be sent with the incident.
2. ##### parametres for `notes`
- `message` (required): The message of the note.
- `incident_id` (required): Id of the incident where the Note has to be created.
diff --git a/docs/workflows/functions/last.mdx b/docs/workflows/functions/last.mdx
new file mode 100644
index 000000000..a3ec04cee
--- /dev/null
+++ b/docs/workflows/functions/last.mdx
@@ -0,0 +1,27 @@
+---
+title: "last(iterable)"
+sidebarTitle: "last"
+---
+
+### Input
+
+An iterable.
+
+### Output
+
+The last item of the iterable.
+
+### Example
+
+```yaml
+actions:
+ - name: keep-slack
+ foreach: "{{steps.this.results}}"
+ condition:
+ - type: threshold
+ value: "keep.last(keep.split({{ foreach.value }}, ' '))"
+ # each line looks like:
+ # '2023-02-09 20:08:16,773 INFO: uvicorn.access -: 127.0.0.1:53948 - "GET /test2 HTTP/1.1" 503'
+ # where the "503" is the last space-separated token — the HTTP status code being compared
+ compare_to: 200
+```
diff --git a/docs/workflows/functions/lowercase.mdx b/docs/workflows/functions/lowercase.mdx
new file mode 100644
index 000000000..e945f4b62
--- /dev/null
+++ b/docs/workflows/functions/lowercase.mdx
@@ -0,0 +1,24 @@
+---
+title: "lowercase(string)"
+sidebarTitle: "lowercase"
+---
+
+### Input
+
+A string.
+
+### Output
+
+Returns the input string converted to lowercase.
+
+### Example
+
+```yaml
+actions:
+ - name: trigger-slack
+ condition:
+ - type: equals
+ value: keep.lowercase('ABC DEF')
+ compare_to: "abc def"
+ compare_type: eq
+```
diff --git a/docs/workflows/functions/uppercase.mdx b/docs/workflows/functions/uppercase.mdx
new file mode 100644
index 000000000..45f3f6672
--- /dev/null
+++ b/docs/workflows/functions/uppercase.mdx
@@ -0,0 +1,24 @@
+---
+title: "uppercase(string)"
+sidebarTitle: "uppercase"
+---
+
+### Input
+
+A string.
+
+### Output
+
+Returns the input string converted to uppercase.
+
+### Example
+
+```yaml
+actions:
+ - name: trigger-slack
+ condition:
+ - type: equals
+ value: keep.uppercase('abc def')
+ compare_to: "ABC DEF"
+ compare_type: eq
+```
diff --git a/examples/workflows/autosupress.yml b/examples/workflows/autosupress.yml
new file mode 100644
index 000000000..d9952098e
--- /dev/null
+++ b/examples/workflows/autosupress.yml
@@ -0,0 +1,16 @@
+workflow:
+ id: autosupress
+ description: demonstrates how to automatically suppress alerts
+ triggers:
+ - type: alert
+ filters:
+ - key: name
+ value: r"(somename)"
+ actions:
+ - name: dismiss-alert
+ provider:
+ type: mock
+ with:
+ enrich_alert:
+ - key: dismissed
+ value: "true"
diff --git a/examples/workflows/squadcast_example.yml b/examples/workflows/squadcast_example.yml
new file mode 100644
index 000000000..5849c5e3e
--- /dev/null
+++ b/examples/workflows/squadcast_example.yml
@@ -0,0 +1,15 @@
+workflow:
+ id: squadcast
+ description: squadcast
+ triggers:
+ - type: alert
+ actions:
+ - name: create-incident
+ provider:
+ config: "{{ providers.squadcast }}"
+ type: squadcast
+ with:
+ additional_json: '{{ alert }}'
+ description: TEST
+ message: '{{ alert.name }}-test'
+ notify_type: incident
diff --git a/keep-ui/app/alerts/alert-actions.tsx b/keep-ui/app/alerts/alert-actions.tsx
index 81a1b53a2..071aff98d 100644
--- a/keep-ui/app/alerts/alert-actions.tsx
+++ b/keep-ui/app/alerts/alert-actions.tsx
@@ -7,7 +7,7 @@ import { useAlerts } from "utils/hooks/useAlerts";
import { PlusIcon } from "@radix-ui/react-icons";
import { toast } from "react-toastify";
import { usePresets } from "utils/hooks/usePresets";
-import { usePathname, useRouter } from "next/navigation";
+import { useRouter } from "next/navigation";
interface Props {
selectedRowIds: string[];
@@ -20,7 +20,6 @@ export default function AlertActions({
alerts,
clearRowSelection,
}: Props) {
- const pathname = usePathname();
const router = useRouter();
const { useAllAlerts } = useAlerts();
const { mutate } = useAllAlerts({ revalidateOnFocus: false });
@@ -96,9 +95,17 @@ export default function AlertActions({
const distinctAlertNames = Array.from(
new Set(selectedAlerts.map((alert) => alert.name))
);
- const options = distinctAlertNames.map((name) => {
- return { value: `name=${name}`, label: `name=${name}` };
- });
+ const formattedCel = distinctAlertNames.reduce(
+ (accumulator, currentValue, currentIndex) => {
+ return (
+ accumulator +
+ (currentIndex > 0 ? " || " : "") +
+ `name == "${currentValue}"`
+ );
+ },
+ ""
+ );
+ const options = [{ value: formattedCel, label: "CEL" }];
const session = await getSession();
const apiUrl = getApiURL();
const response = await fetch(`${apiUrl}/preset`, {
diff --git a/keep-ui/app/alerts/alert-history.tsx b/keep-ui/app/alerts/alert-history.tsx
index d4c33ed8d..331e2d152 100644
--- a/keep-ui/app/alerts/alert-history.tsx
+++ b/keep-ui/app/alerts/alert-history.tsx
@@ -36,7 +36,8 @@ const AlertHistoryPanel = ({
return (
>;
isLoading: boolean;
+ table: Table;
}
-export default function AlertPresets({
- preset,
- alerts,
- selectedOptions,
- setSelectedOptions,
- isLoading,
-}: Props) {
+export default function AlertPresets({ preset, isLoading, table }: Props) {
const apiUrl = getApiURL();
const { useAllPresets } = usePresets();
- const { mutate: presetsMutator } = useAllPresets({
+ const { mutate: presetsMutator, data: savedPresets = [] } = useAllPresets({
revalidateOnFocus: false,
});
const { data: session } = useSession();
const router = useRouter();
- const selectRef = useRef(null);
- const [options, setOptions] = useState([]);
- const [inputValue, setInputValue] = useState("");
- const [isMenuOpen, setIsMenuOpen] = useState(false);
- const uniqueValuesMap = useMemo(() => {
- const newUniqueValuesMap = new Map>();
- if (alerts) {
- // Populating the map with keys and values
- alerts.forEach((alert) => {
- Object.entries(alert).forEach(([key, value]) => {
- if (typeof value !== "string" && key !== "source") return;
- if (!newUniqueValuesMap.has(key)) {
- newUniqueValuesMap.set(key, new Set());
- }
- if (key === "source") {
- value = value?.join(",");
- }
- if (!newUniqueValuesMap.get(key)?.has(value?.trim()))
- newUniqueValuesMap.get(key)?.add(value?.toString().trim());
- });
- });
- }
- return newUniqueValuesMap;
- }, [alerts]);
-
- // Initially, set options to keys
- useEffect(() => {
- setOptions(
- Array.from(uniqueValuesMap.keys()).map((key) => ({
- label: key,
- value: key,
- }))
- );
- }, [uniqueValuesMap]);
-
- const isValidNewOption = () => {
- // Only allow creating new options if the input includes '='
- return inputValue.includes("=");
- };
-
- // Handler for key down events
- const handleKeyDown = (event: React.KeyboardEvent) => {
- const inputElement = event.target as HTMLInputElement; // Cast to HTMLInputElement
-
- if (event.key === "Enter") {
- if (!inputElement.value.includes("=")) {
- event.preventDefault();
- }
- }
-
- if (event.key === "Tab") {
- event.preventDefault();
- // Only add to selectedOptions if focusedOption is not null
- const select = selectRef.current as any;
- if (select?.state.focusedOption) {
- const value = select.state.focusedOption.value;
- if (value.includes("=")) {
- handleInputChange(select.state.focusedOption.value);
- } else {
- handleInputChange(`${value}=`);
- }
- }
- }
- };
-
- const handleChange = (selected: any, actionMeta: any) => {
- if (
- actionMeta.action === "select-option" &&
- selected.some((option: any) => !option.value.includes("="))
- ) {
- // Handle invalid option selection
- handleInputChange(`${actionMeta.option.value}=`);
- // Optionally, you can prevent the selection or handle it differently
- } else {
- setSelectedOptions(selected);
- setIsMenuOpen(false);
- }
- };
-
- const handleInputChange = (inputValue: string) => {
- setInputValue(inputValue);
- if (inputValue.includes("=")) {
- const [inputKey, inputValuePart] = inputValue.split("=");
- if (uniqueValuesMap.has(inputKey)) {
- const filteredValues = Array.from(
- uniqueValuesMap.get(inputKey) || []
- ).filter((value) => value?.startsWith(inputValuePart));
- const newOptions = filteredValues.map((value) => ({
- label: `${inputKey}=${value}`,
- value: `${inputKey}=${value}`,
- }));
- setOptions(newOptions);
- } else {
- setOptions([]);
- }
- } else {
- setOptions(
- Array.from(uniqueValuesMap.keys()).map((key) => ({
- label: key,
- value: key,
- }))
- );
- }
- };
+ const [isModalOpen, setIsModalOpen] = useState(false);
+ const [presetName, setPresetName] = useState(
+ preset?.name === "feed" || preset?.name === "deleted" ? "" : preset?.name
+ );
+ const [isPrivate, setIsPrivate] = useState(preset?.is_private);
+ const [presetCEL, setPresetCEL] = useState("");
- const filterOption = ({ label }: Option, input: string) => {
- return label.toLowerCase().includes(input.toLowerCase());
- };
+ const selectedPreset = savedPresets.find(
+ (savedPreset) =>
+ savedPreset.name.toLowerCase() ===
+ decodeURIComponent(preset!.name).toLowerCase()
+ ) as Preset | undefined;
async function deletePreset(presetId: string) {
if (
@@ -163,111 +54,121 @@ export default function AlertPresets({
type: "success",
});
presetsMutator();
+ router.push("/alerts/feed");
}
}
}
async function addOrUpdatePreset() {
- const newPresetName = prompt(
- `${preset?.name ? "Update preset name?" : "Enter new preset name"}`,
- preset?.name === "feed" || preset?.name === "deleted" ? "" : preset?.name
- );
- if (newPresetName) {
- const options = selectedOptions.map((option) => {
- return {
- value: option.value,
- label: option.label,
- };
- });
+ if (presetName) {
const response = await fetch(
- preset?.id ? `${apiUrl}/preset/${preset?.id}` : `${apiUrl}/preset`,
+ selectedPreset?.id
+ ? `${apiUrl}/preset/${selectedPreset?.id}`
+ : `${apiUrl}/preset`,
{
- method: preset?.id ? "PUT" : "POST",
+ method: selectedPreset?.id ? "PUT" : "POST",
headers: {
Authorization: `Bearer ${session?.accessToken}`,
"Content-Type": "application/json",
},
- body: JSON.stringify({ name: newPresetName, options: options }),
+ body: JSON.stringify({
+ name: presetName,
+ options: [
+ {
+ label: "CEL",
+ value: presetCEL,
+ },
+ ],
+ is_private: isPrivate,
+ }),
}
);
if (response.ok) {
+ setIsModalOpen(false);
toast(
- preset?.name
- ? `Preset ${newPresetName} updated!`
- : `Preset ${newPresetName} created!`,
+ selectedPreset?.name
+ ? `Preset ${presetName} updated!`
+ : `Preset ${presetName} created!`,
{
position: "top-left",
type: "success",
}
);
await presetsMutator();
- router.push(`/alerts/${newPresetName.toLowerCase()}`);
+ router.push(`/alerts/${presetName.toLowerCase()}`);
}
}
}
return (
<>
- Filters
-
-
setIsMenuOpen(true)}
- onBlur={() => setIsMenuOpen(false)}
- isClearable={false}
- isDisabled={isLoading}
- />
- {preset?.name === "feed" && (
- await addOrUpdatePreset()}
- tooltip="Save current filter as a view"
- >
- Create Preset
-
- )}
- {preset?.name !== "deleted" && preset?.name !== "feed" && (
-
+
setIsModalOpen(false)}
+ className="w-[30%] max-w-screen-2xl max-h-[710px] transform overflow-auto ring-tremor bg-white p-6 text-left align-middle shadow-tremor transition-all rounded-xl"
+ >
+
+
+
+ {presetName ? "Update preset name?" : "Enter new preset name"}
+
+
+
+
+ setPresetName(e.target.value)}
+ className="w-full"
+ />
+
+
+
+ setIsPrivate(!isPrivate)}
+ />
+ Private
+
+
+
await addOrUpdatePreset()}
+ onClick={() => setIsModalOpen(false)}
+ tooltip="Close Modal"
>
- Save Preset
+ Close
{
- await deletePreset(preset!.id!);
- }}
+ onClick={addOrUpdatePreset}
+ tooltip="Save Modal"
>
- Delete Preset
+ Save
- )}
+
+
+
>
);
diff --git a/keep-ui/app/alerts/alert-table-tab-panel.tsx b/keep-ui/app/alerts/alert-table-tab-panel.tsx
index 72d0ea6d7..1db678c6c 100644
--- a/keep-ui/app/alerts/alert-table-tab-panel.tsx
+++ b/keep-ui/app/alerts/alert-table-tab-panel.tsx
@@ -1,10 +1,6 @@
-import { useState } from "react";
-import { RowSelectionState } from "@tanstack/react-table";
-import AlertPresets, { Option } from "./alert-presets";
import { AlertTable } from "./alert-table";
import { useAlertTableCols } from "./alert-table-utils";
import { AlertDto, AlertKnownKeys, Preset } from "./models";
-import AlertActions from "./alert-actions";
const getPresetAlerts = (alert: AlertDto, presetName: string): boolean => {
if (presetName === "deleted") {
@@ -26,40 +22,6 @@ const getPresetAlerts = (alert: AlertDto, presetName: string): boolean => {
return true;
};
-const getOptionAlerts = (alert: AlertDto, options: Option[]): boolean =>
- options.length > 0
- ? options.some((option) => {
- const [key, value] = option.value.split("=");
-
- if (key && value) {
- const attribute = key.toLowerCase() as keyof AlertDto;
- const lowercaseAttributeValue = value.toLowerCase();
-
- const alertAttributeValue = alert[attribute];
-
- if (Array.isArray(alertAttributeValue)) {
- return alertAttributeValue.every((v) =>
- lowercaseAttributeValue.split(",").includes(v)
- );
- }
-
- if (typeof alertAttributeValue === "string") {
- return alertAttributeValue
- .toLowerCase()
- .includes(lowercaseAttributeValue);
- }
- }
-
- return false;
- })
- : true;
-
-const getPresetAndOptionsAlerts = (
- alert: AlertDto,
- options: Option[],
- presetName: string
-) => getPresetAlerts(alert, presetName) && getOptionAlerts(alert, options);
-
interface Props {
alerts: AlertDto[];
preset: Preset;
@@ -79,25 +41,8 @@ export default function AlertTableTabPanel({
setRunWorkflowModalAlert,
setDismissModalAlert,
}: Props) {
- const [selectedOptions, setSelectedOptions] = useState
(
- preset.options
- );
-
- const [rowSelection, setRowSelection] = useState({});
- const selectedRowIds = Object.entries(rowSelection).reduce(
- (acc, [alertId, isSelected]) => {
- if (isSelected) {
- return acc.concat(alertId);
- }
- return acc;
- },
- []
- );
-
const sortedPresetAlerts = alerts
- .filter((alert) =>
- getPresetAndOptionsAlerts(alert, selectedOptions, preset.name)
- )
+ .filter((alert) => getPresetAlerts(alert, preset.name))
.sort((a, b) => b.lastReceived.getTime() - a.lastReceived.getTime());
const additionalColsToGenerate = [
@@ -121,29 +66,11 @@ export default function AlertTableTabPanel({
});
return (
- <>
- {selectedRowIds.length ? (
- setRowSelection({})}
- />
- ) : (
-
- )}
-
- >
+
);
}
diff --git a/keep-ui/app/alerts/alert-table-utils.tsx b/keep-ui/app/alerts/alert-table-utils.tsx
index ccfb73843..19dd5a761 100644
--- a/keep-ui/app/alerts/alert-table-utils.tsx
+++ b/keep-ui/app/alerts/alert-table-utils.tsx
@@ -258,7 +258,7 @@ export const useAlertTableCols = (
columnHelper.display({
id: "alertMenu",
meta: {
- tdClassName: "flex justify-end",
+ tdClassName: "sticky right-0",
},
size: 50,
cell: (context) => (
diff --git a/keep-ui/app/alerts/alert-table.tsx b/keep-ui/app/alerts/alert-table.tsx
index d5fb290e3..5dfe1ceff 100644
--- a/keep-ui/app/alerts/alert-table.tsx
+++ b/keep-ui/app/alerts/alert-table.tsx
@@ -3,8 +3,6 @@ import { AlertsTableBody } from "./alerts-table-body";
import { AlertDto } from "./models";
import { CircleStackIcon } from "@heroicons/react/24/outline";
import {
- OnChangeFn,
- RowSelectionState,
getCoreRowModel,
useReactTable,
getPaginationRowModel,
@@ -14,6 +12,7 @@ import {
ColumnSizingState,
getFilteredRowModel,
} from "@tanstack/react-table";
+
import AlertPagination from "./alert-pagination";
import AlertColumnsSelect from "./alert-columns-select";
import AlertsTableHeaders from "./alert-table-headers";
@@ -24,6 +23,9 @@ import {
DEFAULT_COLS_VISIBILITY,
DEFAULT_COLS,
} from "./alert-table-utils";
+import AlertActions from "./alert-actions";
+import AlertPresets from "./alert-presets";
+import { evalWithContext } from "./alerts-rules-builder";
interface Props {
alerts: AlertDto[];
@@ -32,10 +34,6 @@ interface Props {
presetName: string;
isMenuColDisplayed?: boolean;
isRefreshAllowed?: boolean;
- rowSelection?: {
- state: RowSelectionState;
- onChange: OnChangeFn;
- };
}
export function AlertTable({
@@ -43,7 +41,6 @@ export function AlertTable({
columns,
isAsyncLoading = false,
presetName,
- rowSelection,
isRefreshAllowed = true,
}: Props) {
const columnsIds = getColumnsIds(columns);
@@ -70,7 +67,6 @@ export function AlertTable({
columnVisibility: getOnlyVisibleCols(columnVisibility, columnsIds),
columnOrder: columnOrder,
columnSizing: columnSizing,
- rowSelection: rowSelection?.state,
columnPinning: {
left: ["checkbox"],
right: ["alertMenu"],
@@ -79,19 +75,40 @@ export function AlertTable({
initialState: {
pagination: { pageSize: 10 },
},
+ globalFilterFn: ({ original }, _id, value) => {
+ return evalWithContext(original, value);
+ },
getCoreRowModel: getCoreRowModel(),
getFilteredRowModel: getFilteredRowModel(),
getPaginationRowModel: getPaginationRowModel(),
- enableRowSelection: rowSelection !== undefined,
- onRowSelectionChange: rowSelection?.onChange,
onColumnSizingChange: setColumnSizing,
enableColumnPinning: true,
columnResizeMode: "onChange",
autoResetPageIndex: false,
+ enableGlobalFilter: true,
});
+ const selectedRowIds = Object.entries(
+ table.getSelectedRowModel().rowsById
+ ).reduce((acc, [alertId]) => {
+ return acc.concat(alertId);
+ }, []);
+
return (
<>
+ {selectedRowIds.length ? (
+
+ ) : (
+
+ )}
{isAsyncLoading && (
+ // make sure string is a String, and make sure pattern has the /g flag
+ String(string).match(new RegExp(pattern, "g"));
+
+const sanitizeCELIntoJS = (celExpression: string): string => {
+ // First, replace "contains" with "includes"
+ let jsExpression = celExpression.replace(/contains/g, "includes");
+ // Replace severity comparisons with mapped values
+ jsExpression = jsExpression.replace(
+ /severity\s*([<>=]+)\s*(\d)/g,
+ (match, operator, number) => {
+ const severityValue = severityMapping[number];
+ if (!severityValue) {
+ return match; // If no mapping found, return the original match
+ }
+
+ // For equality, directly replace with the severity level
+ if (operator === "==") {
+ return `severity == "${severityValue}"`;
+ }
+
+ // For greater than or less than, include multiple levels based on the mapping
+ const levels = Object.entries(severityMapping);
+ let replacement = "";
+ if (operator === ">") {
+ const filteredLevels = levels
+ .filter(([key]) => key > number)
+ .map(([, value]) => `severity == "${value}"`);
+ replacement = filteredLevels.join(" || ");
+ } else if (operator === "<") {
+ const filteredLevels = levels
+ .filter(([key]) => key < number)
+ .map(([, value]) => `severity == "${value}"`);
+ replacement = filteredLevels.join(" || ");
+ }
+
+ return `(${replacement})`;
+ }
+ );
+
+ // Convert 'in' syntax to '.includes()'
+ jsExpression = jsExpression.replace(
+ /(\w+)\s+in\s+\[([^\]]+)\]/g,
+ (match, variable, list) => {
+ // Split the list by commas, trim spaces, and wrap items in quotes if not already done
+ const items = list
+ .split(",")
+ .map((item: string) => item.trim().replace(/^([^"]*)$/, '"$1"'));
+ return `[${items.join(", ")}].includes(${variable})`;
+ }
+ );
+
+ return jsExpression;
+};
+
+// this pattern is far from robust
+const variablePattern = /[a-zA-Z$_][0-9a-zA-Z$_]*/;
+
+export const evalWithContext = (context: AlertDto, celExpression: string) => {
+ try {
+ if (celExpression.length === 0) {
+ return new Function();
+ }
+
+ const jsExpression = sanitizeCELIntoJS(celExpression);
+ const variables = (
+ getAllMatches(variablePattern, jsExpression) ?? []
+ ).filter((variable) => variable !== "true" && variable !== "false");
+
+ const func = new Function(...variables, `return (${jsExpression})`);
+
+ const args = variables.map((arg) =>
+ Object.hasOwnProperty.call(context, arg)
+ ? context[arg as keyof AlertDto]
+ : undefined
+ );
+
+ return func(...args);
+ } catch (error) {
+ return;
+ }
+};
+
+const getOperators = (id: string): Operator[] => {
+ if (id === "source") {
+ return [
+ { name: "contains", label: "contains" },
+ { name: "null", label: "null" },
+ ];
+ }
+
+ return defaultOperators;
+};
+
+type AlertsRulesBuilderProps = {
+ table: Table;
+ selectedPreset?: Preset;
+ defaultQuery: string | undefined;
+ setIsModalOpen: React.Dispatch>;
+ deletePreset: (presetId: string) => Promise;
+ setPresetCEL: React.Dispatch>;
+};
+
+const SQL_QUERY_PLACEHOLDER = `SELECT *
+FROM alerts
+WHERE severity = 'critical' and status = 'firing'`;
+
+export const AlertsRulesBuilder = ({
+ table,
+ selectedPreset,
+ defaultQuery = "",
+ setIsModalOpen,
+ deletePreset,
+ setPresetCEL,
+}: AlertsRulesBuilderProps) => {
+ const [isGUIOpen, setIsGUIOpen] = useState(false);
+ const [isImportSQLOpen, setImportSQLOpen] = useState(false);
+ const [sqlQuery, setSQLQuery] = useState("");
+ const [celRules, setCELRules] = useState(defaultQuery);
+
+ const parsedCELRulesToQuery = parseCEL(celRules);
+ const [query, setQuery] = useState(parsedCELRulesToQuery);
+ const [isValidCEL, setIsValidCEL] = useState(true);
+ const [sqlError, setSqlError] = useState(null);
+
+ const textAreaRef = useRef(null);
+
+ const isFirstRender = useRef(true);
+
+ const constructCELRules = (preset?: Preset) => {
+ // Check if selectedPreset is defined and has options
+ if (preset && preset.options) {
+ // New version: single "CEL" key
+ const celOption = preset.options.find((option) => option.label === "CEL");
+ if (celOption) {
+ return celOption.value;
+ }
+ // Older version: Concatenate multiple fields
+ else {
+ return preset.options
+ .map((option) => {
+ // Assuming the older format is exactly "x='y'" (x equals y)
+ // We split the string by '=', then trim and quote the value part
+ let [key, value] = option.value.split("=");
+ // Trim spaces and single quotes (if any) from the value
+ value = value.trim().replace(/^'(.*)'$/, "$1");
+ // Return the correctly formatted CEL expression
+ return `${key.trim()}=="${value}"`;
+ })
+ .join(" && ");
+ }
+ }
+ return ""; // Default to empty string if no preset or options are found
+ };
+
+ useEffect(() => {
+ // Use the constructCELRules function to set the initial value of celRules
+ const initialCELRules = constructCELRules(selectedPreset);
+ setCELRules(initialCELRules);
+ }, [selectedPreset]);
+
+ useEffect(() => {
+ // This effect waits for celRules to update and applies the filter only on the initial render
+ if (isFirstRender.current && celRules.length > 0) {
+ onApplyFilter();
+ isFirstRender.current = false;
+ } else if (!selectedPreset) {
+ isFirstRender.current = false;
+ }
+ // This effect should only run when celRules updates and on initial render
+ }, [celRules]);
+
+ // Adjust the height of the textarea based on its content
+ const adjustTextAreaHeight = () => {
+ const textArea = textAreaRef.current;
+ if (textArea) {
+ textArea.style.height = "auto";
+ textArea.style.height = `${textArea.scrollHeight}px`;
+ }
+ };
+ // Adjust the height whenever the content changes
+ useEffect(() => {
+ adjustTextAreaHeight();
+ }, [celRules]);
+
+ const handleClearInput = () => {
+ setCELRules("");
+ table.resetGlobalFilter();
+ setIsValidCEL(true);
+ };
+
+ const handleKeyDown = (e: React.KeyboardEvent) => {
+ if (e.key === "Enter") {
+ e.preventDefault(); // Prevents the default action of Enter key in a form
+ // You can now use `target` which is asserted to be an HTMLTextAreaElement
+
+ // check if the CEL is valid by comparing the parsed query with the original CEL
+ // remove spaces so that "a && b" is the same as "a&&b"
+ const celQuery = formatQuery(parsedCELRulesToQuery, "cel");
+ const isValidCEL =
+ celQuery.replace(/\s+/g, "") === celRules.replace(/\s+/g, "") ||
+ celRules === "";
+ setIsValidCEL(isValidCEL);
+ if (isValidCEL) {
+ onApplyFilter();
+ }
+ }
+ };
+
+ const onApplyFilter = () => {
+ if (celRules.length === 0) {
+ return table.resetGlobalFilter();
+ }
+
+ return table.setGlobalFilter(celRules);
+ };
+
+ const onGenerateQuery = () => {
+ setCELRules(formatQuery(query, "cel"));
+ setIsGUIOpen(false);
+ };
+
+ const fields: Field[] = table
+ .getAllColumns()
+ .filter(({ getIsPinned }) => getIsPinned() === false)
+ .map(({ id, columnDef }) => ({
+ name: id,
+ label: columnDef.header as string,
+ operators: getOperators(id),
+ }));
+
+ const onImportSQL = () => {
+ setImportSQLOpen(true);
+ };
+
+ const convertSQLToCEL = (sql: string): string | null => {
+ try {
+ const query = parseSQL(sql);
+ const formattedCel = formatQuery(query, "cel");
+ return formatQuery(parseCEL(formattedCel), "cel");
+ } catch (error) {
+ // If the caught error is an instance of Error, use its message
+ if (error instanceof Error) {
+ setSqlError(error.message);
+ } else {
+ setSqlError("An unknown error occurred while parsing SQL.");
+ }
+ return null;
+ }
+ };
+
+ const onImportSQLSubmit = () => {
+ const convertedCEL = convertSQLToCEL(sqlQuery);
+ if (convertedCEL) {
+ setCELRules(convertedCEL); // Set the converted CEL as the new CEL rules
+ setImportSQLOpen(false); // Close the modal
+ setSqlError(null); // Clear any previous errors
+ }
+ };
+
+ const onValueChange = (value: string) => {
+ setCELRules(value);
+ if (value.length === 0) {
+ setIsValidCEL(true);
+ }
+ };
+
+ const validateAndOpenSaveModal = (celExpression: string) => {
+ // Use existing validation logic
+ const celQuery = formatQuery(parseCEL(celExpression), "cel");
+ const isValidCEL =
+ celQuery.replace(/\s+/g, "") === celExpression.replace(/\s+/g, "") ||
+ celExpression === "";
+
+ if (isValidCEL && celExpression.length) {
+ // If CEL is valid and not empty, set the CEL rules for the preset and open the modal
+ setPresetCEL(celExpression);
+ setIsModalOpen(true);
+ } else {
+ // If CEL is invalid or empty, inform the user
+ alert("You can only save a valid CEL expression.");
+ setIsValidCEL(isValidCEL);
+ }
+ };
+
+ return (
+
+
setIsGUIOpen(false)}
+ className="w-[50%] max-w-screen-2xl max-h-[710px] transform overflow-auto ring-tremor bg-white p-6 text-left align-middle shadow-tremor transition-all rounded-xl"
+ title="Query Builder"
+ >
+
+
+ setQuery(newQuery)}
+ fields={fields}
+ addRuleToNewGroups
+ showCombinatorsBetweenRules={false}
+ />
+
+
+
+ Generate Query
+
+
+
+
+
{
+ setImportSQLOpen(false);
+ setSqlError(null);
+ }} // Clear the error when closing the modal
+ title="Import from SQL"
+ >
+
+
+ {sqlError && (
+
Error: {sqlError}
+ )}
+
0)}
+ >
+ Convert to CEL
+
+
+
+
+
+
+ {/* CEL badge and (i) icon container */}
+
+
+ window.open(
+ "https://docs.keephq.dev/overview/presets",
+ "_blank"
+ )
+ }
+ >
+ CEL
+
+
+
+ {/* Textarea and error message container */}
+
+
+ {!isValidCEL && (
+
+ Invalid Common Expression Logic expression.
+
+ )}
+ {celRules && (
+
+
+
+ )}
+
+ Enter to apply
+
+
+
+
+ {/* Buttons next to the Textarea */}
+
validateAndOpenSaveModal(celRules)}
+ tooltip="Save current filter as a view"
+ >
+ {selectedPreset &&
+ selectedPreset.name &&
+ selectedPreset?.name !== "deleted" &&
+ selectedPreset?.name !== "feed" &&
+ selectedPreset?.name !== "dismissed" && (
+
await deletePreset(selectedPreset!.id!)}
+ >
+ )}
+
+
+
+ );
+};
diff --git a/keep-ui/app/alerts/alerts.tsx b/keep-ui/app/alerts/alerts.tsx
index 2983cebcf..2e92b1ee6 100644
--- a/keep-ui/app/alerts/alerts.tsx
+++ b/keep-ui/app/alerts/alerts.tsx
@@ -16,10 +16,10 @@ import AlertRunWorkflowModal from "./alert-run-workflow-modal";
import AlertDismissModal from "./alert-dismiss-modal";
const defaultPresets: Preset[] = [
- { name: "feed", options: [] },
- { name: "deleted", options: [] },
- { name: "dismissed", options: []},
- { name: "groups", options: [] },
+ { name: "feed", options: [], is_private: false },
+ { name: "deleted", options: [], is_private: false },
+ { name: "dismissed", options: [], is_private: false },
+ { name: "groups", options: [], is_private: false },
];
type AlertsProps = {
@@ -64,10 +64,6 @@ export default function Alerts({ presetName }: AlertsProps) {
(preset) => preset.name.toLowerCase() === decodeURIComponent(presetName)
);
- if (selectedPreset === undefined) {
- router.push("/alerts/feed");
- }
-
return (
{pusherChannel && (
@@ -106,8 +102,8 @@ export default function Alerts({ presetName }: AlertsProps) {
handleClose={() => setRunWorkflowModalAlert(null)}
/>
setDismissModalAlert(null)}
+ alert={dismissModalAlert}
+ handleClose={() => setDismissModalAlert(null)}
/>
);
diff --git a/keep-ui/app/alerts/models.tsx b/keep-ui/app/alerts/models.tsx
index d7fb04d8d..a831294ea 100644
--- a/keep-ui/app/alerts/models.tsx
+++ b/keep-ui/app/alerts/models.tsx
@@ -1,5 +1,3 @@
-import { Option } from "./alert-presets";
-
export enum Severity {
Critical = "critical",
High = "high",
@@ -9,6 +7,14 @@ export enum Severity {
Error = "error",
}
+export const severityMapping: { [id: number]: string } = {
+ 1: Severity.Info,
+ 2: Severity.Low,
+ 3: Severity.Medium,
+ 4: Severity.High,
+ 5: Severity.Critical,
+};
+
export interface AlertDto {
id: string;
name: string;
@@ -37,10 +43,16 @@ export interface AlertDto {
note?: string;
}
+interface Option {
+ readonly label: string;
+ readonly value: string;
+}
+
export interface Preset {
id?: string;
name: string;
options: Option[];
+ is_private: boolean;
}
export const AlertKnownKeys = [
diff --git a/keep-ui/app/loading.tsx b/keep-ui/app/loading.tsx
index 06e78aa68..9596de9c9 100644
--- a/keep-ui/app/loading.tsx
+++ b/keep-ui/app/loading.tsx
@@ -15,7 +15,8 @@ export default function Loading({
}`}
>
("");
const [mapDescription, setMapDescription] = useState("");
const [selectedAttributes, setSelectedAttributes] = useState([]);
+ const [priority, setPriority] = useState(0);
/** This is everything related with the uploaded CSV file */
const [parsedData, setParsedData] = useState(null);
@@ -72,6 +76,7 @@ export default function CreateNewMapping() {
"Content-Type": "application/json",
},
body: JSON.stringify({
+ priority: priority,
name: mapName,
description: mapDescription,
file_name: fileName,
@@ -123,6 +128,20 @@ export default function CreateNewMapping() {
onValueChange={setMapDescription}
/>
+
+
+ Priority
+
+
+
+
context.row.original.id,
}),
+ columnHelper.display({
+ id: "priority",
+ header: "Priority",
+ cell: (context) => context.row.original.priority,
+ }),
columnHelper.display({
id: "name",
header: "Name",
@@ -84,7 +89,7 @@ export default function RulesTable({ mappings }: { mappings: MappingRule[] }) {
const table = useReactTable({
columns,
- data: mappings,
+ data: mappings.sort((a, b) => b.priority - a.priority),
getCoreRowModel: getCoreRowModel(),
});
diff --git a/keep-ui/app/providers/layout.tsx b/keep-ui/app/providers/layout.tsx
index a5f82163f..192fd456e 100644
--- a/keep-ui/app/providers/layout.tsx
+++ b/keep-ui/app/providers/layout.tsx
@@ -37,7 +37,6 @@ export default function ProvidersLayout({
{provider.provider_description}
)}
-
+ )}
{provider.scopes?.length > 0 && (
>
) : null}
-
-
- Provider Name
- *
-
-
-
+ {Object.keys(provider.config).length > 0 && (
+ <>
+
+
+ Provider Name
+ *
+
+
+
+ >
+ )}
{Object.keys(provider.config).map((configKey) => {
const method = provider.config[configKey];
@@ -639,7 +645,7 @@ const ProviderForm = ({
>
Cancel
- {installedProvidersMode && (
+ {installedProvidersMode && Object.keys(provider.config).length > 0 && (
<>
Delete
@@ -653,7 +659,7 @@ const ProviderForm = ({
>
)}
- {!installedProvidersMode && (
+ {!installedProvidersMode && Object.keys(provider.config).length > 0 && (
{
height={150}
onError={(event) => {
const target = event.target as HTMLImageElement;
- target.src = "keep.svg"; // Set fallback icon
+ target.src = "/keep.svg"; // Set fallback icon
}}
/>
diff --git a/keep-ui/app/providers/provider-semi-automated.tsx b/keep-ui/app/providers/provider-semi-automated.tsx
index c55182f50..f0f3ff72d 100644
--- a/keep-ui/app/providers/provider-semi-automated.tsx
+++ b/keep-ui/app/providers/provider-semi-automated.tsx
@@ -6,10 +6,13 @@ import { Subtitle, Title, Text, Icon } from "@tremor/react";
import { CopyBlock, a11yLight, railscast } from "react-code-blocks";
import Image from "next/image";
import { ArrowLongRightIcon } from "@heroicons/react/24/outline";
+import Markdown from "react-markdown";
+import remarkGfm from 'remark-gfm'
interface WebhookSettings {
webhookDescription: string;
webhookTemplate: string;
+ webhookMarkdown: string;
}
interface Props {
@@ -43,11 +46,12 @@ export const ProviderSemiAutomated = ({ provider, accessToken }: Props) => {
const isMultiline = data!.webhookDescription.includes('\n');
const descriptionLines = data!.webhookDescription.split('\n');
const settingsNotEmpty = settings.text.trim().length > 0;
+ const webhookMarkdown = data!.webhookMarkdown;
return (
Push alerts from{" "}
- {provider.type.charAt(0).toLocaleUpperCase() + provider.type.slice(1)}
+ {provider.type.charAt(0).toLocaleUpperCase() + provider.display_name.slice(1)}
{
/>
- Seamlessly push alerts without actively connecting {provider.type}
+ Seamlessly push alerts without actively connecting {provider.display_name}
{isMultiline ? (
descriptionLines.map((line, index) => (
@@ -79,6 +83,13 @@ export const ProviderSemiAutomated = ({ provider, accessToken }: Props) => {
{data!.webhookDescription}
)}
{settingsNotEmpty &&
}
+ {webhookMarkdown && (
+
+
+ {webhookMarkdown}
+
+
+ )}
);
};
diff --git a/keep-ui/app/providers/providers-tiles.tsx b/keep-ui/app/providers/providers-tiles.tsx
index 74570f6ce..077541852 100644
--- a/keep-ui/app/providers/providers-tiles.tsx
+++ b/keep-ui/app/providers/providers-tiles.tsx
@@ -98,7 +98,8 @@ const ProvidersTiles = ({
const providersWithConfig = providers
.filter((provider) => {
const config = (provider as Provider).config;
- return config && Object.keys(config).length > 0; // Filter out providers with empty config
+ // Filter out providers with empty config and providers that support webhooks
+ return (config && Object.keys(config).length > 0) || (provider.supports_webhook);
})
.sort(
(a, b) =>
diff --git a/keep-ui/app/rules/rules.client.tsx b/keep-ui/app/rules/rules.client.tsx
index 7e9a57e38..9e7c5ebff 100644
--- a/keep-ui/app/rules/rules.client.tsx
+++ b/keep-ui/app/rules/rules.client.tsx
@@ -32,7 +32,6 @@ import QueryBuilder, {
QueryValidator,
findPath,
} from "react-querybuilder";
-// import 'react-querybuilder/dist/query-builder.scss';
import { getApiURL } from "utils/apiUrl";
import { useSession } from "next-auth/react";
import Loading from "../loading";
diff --git a/keep-ui/app/settings/webhook-settings.tsx b/keep-ui/app/settings/webhook-settings.tsx
index 47e20c673..61fe81be8 100644
--- a/keep-ui/app/settings/webhook-settings.tsx
+++ b/keep-ui/app/settings/webhook-settings.tsx
@@ -136,7 +136,7 @@ req.end();
const resp = await fetch(data.webhookApi, requestOptions);
if (resp.ok) {
- router.push("/alerts");
+ router.push("/alerts/feed");
} else {
alert("Something went wrong! Please try again.");
}
diff --git a/keep-ui/app/workflows/[workflow_id]/runs/[workflow_execution_id]/page.tsx b/keep-ui/app/workflows/[workflow_id]/runs/[workflow_execution_id]/page.tsx
index 9b84c9cdd..7c2774a2f 100644
--- a/keep-ui/app/workflows/[workflow_id]/runs/[workflow_execution_id]/page.tsx
+++ b/keep-ui/app/workflows/[workflow_id]/runs/[workflow_execution_id]/page.tsx
@@ -189,11 +189,18 @@ export default function WorkflowExecutionPage({
) : (
- {error || "An unknown error occurred during execution."}
+ {error
+ ? error.split('\n').map((line, index) => (
+ // Render each line as a separate paragraph or div.
+ // The key is index, which is sufficient for simple lists like this.
+ {line}
+ ))
+ : "An unknown error occurred during execution."
+ }
)}
diff --git a/keep-ui/app/workflows/builder/alert.tsx b/keep-ui/app/workflows/builder/alert.tsx
index 369d4464c..d56a17b0e 100644
--- a/keep-ui/app/workflows/builder/alert.tsx
+++ b/keep-ui/app/workflows/builder/alert.tsx
@@ -7,6 +7,7 @@ interface Provider {
interface Step {
name: string;
provider: Provider;
+ if?: string;
}
interface Condition {
diff --git a/keep-ui/app/workflows/builder/builder-validators.tsx b/keep-ui/app/workflows/builder/builder-validators.tsx
index 4f489f9fe..ab21ace19 100644
--- a/keep-ui/app/workflows/builder/builder-validators.tsx
+++ b/keep-ui/app/workflows/builder/builder-validators.tsx
@@ -51,16 +51,6 @@ export function stepValidator(
definition: Definition,
setStepValidationError: Dispatch>
): boolean {
- if (step.type === "foreach") {
- // This checks if there's any step that is not action in foreach
- const foreachIncludesNotCondition = (step as SequentialStep).sequence.some(
- (step) => !step.type.includes("condition-")
- );
- if (foreachIncludesNotCondition) {
- setStepValidationError("Foreach can only contain conditions.");
- return false;
- }
- }
if (step.type.includes("condition-")) {
const onlyActions = (step as BranchedStep).branches.true.every((step) =>
step.type.includes("action-")
diff --git a/keep-ui/app/workflows/builder/editors.tsx b/keep-ui/app/workflows/builder/editors.tsx
index 52de3cda2..1e9bec407 100644
--- a/keep-ui/app/workflows/builder/editors.tsx
+++ b/keep-ui/app/workflows/builder/editors.tsx
@@ -344,7 +344,7 @@ function WorkflowEditor(properties: Properties, updateProperty: any) {
Add Filter
- {Object.keys(properties.alert as {}).map((filter) => {
+ {properties.alert && Object.keys(properties.alert as {}).map((filter) => {
return (
<>
{filter}
diff --git a/keep-ui/app/workflows/builder/utils.tsx b/keep-ui/app/workflows/builder/utils.tsx
index 388c3c54e..b63a1b93b 100644
--- a/keep-ui/app/workflows/builder/utils.tsx
+++ b/keep-ui/app/workflows/builder/utils.tsx
@@ -127,11 +127,32 @@ export function getActionOrStepObj(
};
}
+function generateForeach(
+ actionOrStep: any,
+ stepOrAction: "step" | "action",
+ providers?: Provider[],
+ sequence?: any
+) {
+ return {
+ id: Uid.next(),
+ type: "foreach",
+ componentType: "container",
+ name: "Foreach",
+ properties: {
+ value: actionOrStep.foreach,
+ },
+ sequence: [
+ sequence ?? getActionOrStepObj(actionOrStep, stepOrAction, providers),
+ ],
+ };
+}
+
export function generateCondition(
condition: any,
action: any,
providers?: Provider[]
): any {
+ const stepOrAction = action.type === "step" ? "step" : "action";
const generatedCondition = {
id: Uid.next(),
name: condition.name,
@@ -145,23 +166,14 @@ export function generateCondition(
assert: condition.assert,
},
branches: {
- true: [getActionOrStepObj(action, "action", providers)],
+ true: [getActionOrStepObj(action, stepOrAction, providers)],
false: [],
},
};
// If this is a foreach, we need to add the foreach to the condition
if (action.foreach) {
- return {
- id: Uid.next(),
- type: "foreach",
- componentType: "container",
- name: "Foreach",
- properties: {
- value: action.foreach,
- },
- sequence: [generatedCondition],
- };
+ return generateForeach(action, stepOrAction, providers, generatedCondition);
}
return generatedCondition;
@@ -203,15 +215,19 @@ export function parseWorkflow(
const workflow = parsedWorkflowFile.alert
? parsedWorkflowFile.alert
: parsedWorkflowFile.workflow;
- const steps =
- workflow.steps?.map((step: any) => {
- return getActionOrStepObj(step, "step", providers);
+ const steps = [] as any;
+ const workflowSteps =
+ workflow.steps?.map((s: any) => {
+ s.type = "step";
+ return s;
}) || [];
+ const workflowActions = workflow.actions || [];
const conditions = [] as any;
- workflow.actions?.forEach((action: any) => {
+ [...workflowSteps, ...workflowActions].forEach((action: any) => {
+ const stepOrAction = action.type === "step" ? "step" : "action";
// This means this action always runs, there's no condition and no alias
- if (!action.condition && !action.if) {
- steps.push(getActionOrStepObj(action, "action", providers));
+ if (!action.condition && !action.if && !action.foreach) {
+ steps.push(getActionOrStepObj(action, stepOrAction, providers));
}
// If this is an alias, we need to find the existing condition and add this action to it
else if (action.if) {
@@ -221,11 +237,17 @@ export function parseWorkflow(
);
if (existingCondition) {
existingCondition.branches.true.push(
- getActionOrStepObj(action, "action", providers)
+ getActionOrStepObj(action, stepOrAction, providers)
);
} else {
- steps.push(getActionOrStepObj(action, "action", providers));
+ if (action.foreach) {
+ steps.push(generateForeach(action, stepOrAction, providers));
+ } else {
+ steps.push(getActionOrStepObj(action, stepOrAction, providers));
+ }
}
+ } else if (action.foreach) {
+ steps.push(generateForeach(action, stepOrAction, providers));
} else {
action.condition.forEach((condition: any) => {
conditions.push(generateCondition(condition, action, providers));
@@ -237,7 +259,7 @@ export function parseWorkflow(
workflow.triggers?.reduce((prev: any, curr: any) => {
const currType = curr.type;
let value = curr.value;
- if (currType === "alert") {
+ if (currType === "alert" && curr.filters) {
value = curr.filters.reduce((prev: any, curr: any) => {
prev[curr.key] = curr.value;
return prev;
@@ -375,7 +397,33 @@ export function buildAlert(definition: Definition): Alert {
const condition = (forEach as SequentialStep).sequence.find((c) =>
c.type.startsWith("condition-")
) as BranchedStep;
- const foreachActions = getActionsFromCondition(condition, forEachValue);
+ let foreachActions = [] as Action[];
+ if (condition) {
+ foreachActions = getActionsFromCondition(condition, forEachValue);
+ } else {
+ const stepOrAction = (forEach as SequentialStep).sequence[0];
+ const withParams = getWithParams(stepOrAction);
+ const providerType = stepOrAction.type
+ .replace("action-", "")
+ .replace("step-", "");
+ const ifParam = stepOrAction.properties.if;
+ const providerName =
+ (stepOrAction.properties.config as string)?.trim() ||
+ `default-${providerType}`;
+ const provider: any = {
+ type: stepOrAction.type.replace("action-", "").replace("step-", ""),
+ config: `{{ providers.${providerName} }}`,
+ with: withParams,
+ };
+ foreachActions = [
+ {
+ name: stepOrAction.name,
+ provider: provider,
+ foreach: forEachValue,
+ if: ifParam as string,
+ },
+ ];
+ }
actions = [...actions, ...foreachActions];
});
// Actions > Condition
diff --git a/keep-ui/app/workflows/models.tsx b/keep-ui/app/workflows/models.tsx
index c126b821c..b2cb6502d 100644
--- a/keep-ui/app/workflows/models.tsx
+++ b/keep-ui/app/workflows/models.tsx
@@ -19,6 +19,7 @@ export type Trigger = {
export type Workflow = {
id: string;
+ name: string;
description: string;
created_by: string;
creation_time: string;
@@ -27,6 +28,7 @@ export type Workflow = {
triggers: Trigger[];
last_execution_time: string;
last_execution_status: string;
+ last_updated: string;
workflow_raw: string;
workflow_raw_id: string;
}
diff --git a/keep-ui/app/workflows/workflow-tile.tsx b/keep-ui/app/workflows/workflow-tile.tsx
index 1dac26c50..321d9180f 100644
--- a/keep-ui/app/workflows/workflow-tile.tsx
+++ b/keep-ui/app/workflows/workflow-tile.tsx
@@ -385,7 +385,7 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
)}
-
{workflow.description}
+ {workflow.name}
{WorkflowMenuSection({
onDelete: handleDeleteClick,
onRun: handleRunClick,
@@ -396,6 +396,10 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
})}
+
+ {workflow.description}
+
+
Created By
@@ -405,6 +409,10 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
Created At
{workflow.creation_time}
+
+ Last Updated
+ {workflow.last_updated}
+
Last Execution
diff --git a/keep-ui/app/workflows/workflows.client.tsx b/keep-ui/app/workflows/workflows.client.tsx
index 80e09d4a0..990af4818 100644
--- a/keep-ui/app/workflows/workflows.client.tsx
+++ b/keep-ui/app/workflows/workflows.client.tsx
@@ -4,7 +4,7 @@ import { useRef, useState } from "react";
import useSWR from "swr";
import { Callout, Subtitle } from "@tremor/react";
import {
- ArrowDownOnSquareIcon,
+ ArrowUpOnSquareStackIcon,
ExclamationCircleIcon,
PlusCircleIcon,
} from "@heroicons/react/24/outline";
@@ -51,39 +51,51 @@ export default function WorkflowsPage() {
}
const onDrop = async (files: any) => {
- const formData = new FormData();
- const file = files.target.files[0];
- formData.append("file", file);
-
- try {
- const response = await fetch(`${apiUrl}/workflows`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${session?.accessToken}`,
- },
- body: formData,
- });
-
- if (response.ok) {
- setFileError(null);
- if (fileInputRef.current) {
- fileInputRef.current.value = "";
+ const fileUpload = async (formData: FormData, reload: boolean) => {
+ try {
+ const response = await fetch(`${apiUrl}/workflows`, {
+ method: "POST",
+ headers: {
+ Authorization: `Bearer ${session?.accessToken}`,
+ },
+ body: formData,
+ });
+
+ if (response.ok) {
+ setFileError(null);
+ if (fileInputRef.current) {
+ fileInputRef.current.value = "";
+ }
+ if (reload) {
+ window.location.reload();
+ }
+ } else {
+ const errorMessage = await response.text();
+ setFileError(errorMessage);
+ if (fileInputRef.current) {
+ fileInputRef.current.value = "";
+ }
}
- window.location.reload();
- } else {
- const errorMessage = await response.text();
- setFileError(errorMessage);
+ } catch (error) {
+ setFileError("An error occurred during file upload");
if (fileInputRef.current) {
fileInputRef.current.value = "";
}
}
- } catch (error) {
- setFileError("An error occurred during file upload");
- if (fileInputRef.current) {
- fileInputRef.current.value = "";
+ };
+
+ const formData = new FormData();
+ var reload = false;
+
+ for (let i = 0; i < files.target.files.length; i++) {
+ const file = files.target.files[i];
+ formData.set("file", file);
+ if (files.target.files.length === i + 1) {
+ reload = true;
}
- }
- };
+ await fileUpload(formData, reload);
+ };
+ }
function handleStaticExampleSelect(example: string) {
// todo: something less static
@@ -165,21 +177,22 @@ export default function WorkflowsPage() {
onClick={() => {
setIsModalOpen(true);
}}
- icon={ArrowDownOnSquareIcon}
+ icon={ArrowUpOnSquareStackIcon}
>
- Upload a Workflow
+ Upload Workflows
setIsModalOpen(false)}
- title="Upload a Workflow file"
+ title="Upload Workflow files"
>
{
onDrop(e);
setIsModalOpen(false); // Add this line to close the modal
diff --git a/keep-ui/components/LinkWithIcon.tsx b/keep-ui/components/LinkWithIcon.tsx
index a4d7e420b..fe4ac36e6 100644
--- a/keep-ui/components/LinkWithIcon.tsx
+++ b/keep-ui/components/LinkWithIcon.tsx
@@ -21,7 +21,8 @@ export const LinkWithIcon = ({
}: StyledLinkProps) => {
const pathname = usePathname();
- const isActive = pathname === restOfLinkProps.href;
+ const isActive =
+ decodeURIComponent(pathname?.toLowerCase() || "") === restOfLinkProps.href;
return (
diff --git a/keep-ui/middleware.tsx b/keep-ui/middleware.tsx
index 66dfee92f..350acf077 100644
--- a/keep-ui/middleware.tsx
+++ b/keep-ui/middleware.tsx
@@ -27,8 +27,8 @@ export default withAuth(function middleware(req) {
// I guess first step should be some mapping ~ {role: [allowed_pages]}
// and the second step would be to get it dymnamically from an API
// or some role-based routing
- if (req.nextauth.token?.role === "noc" && pathname !== "/alerts") {
- return NextResponse.redirect(new URL("/alerts", req.url));
+ if (req.nextauth.token?.role === "noc" && !pathname.startsWith("/alerts")){
+ return NextResponse.redirect(new URL("/alerts/feed", req.url));
}
// Continue with the normal flow for other cases
diff --git a/keep-ui/package-lock.json b/keep-ui/package-lock.json
index 1402ac61c..4595e8f2f 100644
--- a/keep-ui/package-lock.json
+++ b/keep-ui/package-lock.json
@@ -133,7 +133,7 @@
"find-root": "^1.1.0",
"find-up": "^5.0.0",
"flat-cache": "^3.0.4",
- "follow-redirects": "^1.15.4",
+ "follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"fs.realpath": "^1.0.0",
"function-bind": "^1.1.1",
@@ -193,7 +193,7 @@
"isarray": "^2.0.5",
"isexe": "^2.0.0",
"jiti": "^1.18.2",
- "jose": "^4.14.4",
+ "jose": "^4.15.5",
"js-cookie": "^3.0.5",
"js-sdsl": "^4.4.0",
"js-tokens": "^4.0.0",
@@ -264,7 +264,7 @@
"postcss-nested": "^6.0.1",
"postcss-selector-parser": "^6.0.12",
"postcss-value-parser": "^4.2.0",
- "posthog-js": "^1.105.7",
+ "posthog-js": "^1.116.2",
"posthog-node": "^3.1.1",
"preact-render-to-string": "^5.2.6",
"prelude-ls": "^1.2.1",
@@ -282,6 +282,7 @@
"react-icons": "^4.9.0",
"react-loading": "^2.0.3",
"react-loading-skeleton": "^3.3.1",
+ "react-markdown": "^9.0.1",
"react-modal": "^3.16.1",
"react-name-initials-avatar": "^0.0.7",
"react-papaparse": "^4.4.0",
@@ -297,6 +298,7 @@
"regenerator-runtime": "^0.13.11",
"regexp.prototype.flags": "^1.5.0",
"regexpp": "^3.2.0",
+ "remark-gfm": "^4.0.0",
"resolve": "^1.22.2",
"resolve-from": "^4.0.0",
"reusify": "^1.0.4",
@@ -358,10 +360,13 @@
"zod": "^3.22.3"
},
"devDependencies": {
+ "@tailwindcss/typography": "^0.5.10",
"@types/js-cookie": "^3.0.3",
"@types/js-yaml": "^4.0.5",
+ "@types/json-logic-js": "^2.0.7",
+ "@types/json-query": "^2.2.6",
"@types/node": "20.2.1",
- "@types/react-datepicker": "^6.0.1",
+ "@types/react-datepicker": "^6.0.2",
"@types/react-modal": "^3.16.0",
"@types/uuid": "^9.0.1"
}
@@ -386,12 +391,12 @@
}
},
"node_modules/@ampproject/remapping": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz",
- "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==",
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
+ "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
"dependencies": {
- "@jridgewell/gen-mapping": "^0.3.0",
- "@jridgewell/trace-mapping": "^0.3.9"
+ "@jridgewell/gen-mapping": "^0.3.5",
+ "@jridgewell/trace-mapping": "^0.3.24"
},
"engines": {
"node": ">=6.0.0"
@@ -418,20 +423,20 @@
}
},
"node_modules/@babel/core": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.9.tgz",
- "integrity": "sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.0.tgz",
+ "integrity": "sha512-fQfkg0Gjkza3nf0c7/w6Xf34BW4YvzNfACRLmmb7XRLa6XHdR+K9AlJlxneFfWYf6uhOzuzZVTjF/8KfndZANw==",
"dependencies": {
"@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.23.5",
"@babel/generator": "^7.23.6",
"@babel/helper-compilation-targets": "^7.23.6",
"@babel/helper-module-transforms": "^7.23.3",
- "@babel/helpers": "^7.23.9",
- "@babel/parser": "^7.23.9",
- "@babel/template": "^7.23.9",
- "@babel/traverse": "^7.23.9",
- "@babel/types": "^7.23.9",
+ "@babel/helpers": "^7.24.0",
+ "@babel/parser": "^7.24.0",
+ "@babel/template": "^7.24.0",
+ "@babel/traverse": "^7.24.0",
+ "@babel/types": "^7.24.0",
"convert-source-map": "^2.0.0",
"debug": "^4.1.0",
"gensync": "^1.0.0-beta.2",
@@ -543,9 +548,9 @@
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="
},
"node_modules/@babel/helper-create-class-features-plugin": {
- "version": "7.23.10",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.23.10.tgz",
- "integrity": "sha512-2XpP2XhkXzgxecPNEEK8Vz8Asj9aRxt08oKOqtiZoqV2UGZ5T+EkyP9sXQ9nwMxBIG34a7jmasVqoMop7VdPUw==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.0.tgz",
+ "integrity": "sha512-QAH+vfvts51BCsNZ2PhY6HAggnlS6omLLFTsIpeqZk/MmJ6cW7tgz5yRv0fMJThcr6FmbMrENh1RgrWPTYA76g==",
"dependencies": {
"@babel/helper-annotate-as-pure": "^7.22.5",
"@babel/helper-environment-visitor": "^7.22.20",
@@ -694,9 +699,9 @@
}
},
"node_modules/@babel/helper-plugin-utils": {
- "version": "7.22.5",
- "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz",
- "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.0.tgz",
+ "integrity": "sha512-9cUznXMG0+FxRuJfvL82QlTqIzhVW9sL0KjMPHhAOOvpQGL8QtdxnBKILjBqxlHyliz0yCa1G903ZXI/FuHy2w==",
"engines": {
"node": ">=6.9.0"
}
@@ -804,13 +809,13 @@
}
},
"node_modules/@babel/helpers": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.9.tgz",
- "integrity": "sha512-87ICKgU5t5SzOT7sBMfCOZQ2rHjRU+Pcb9BoILMYz600W6DkVRLFBPwQ18gwUVvggqXivaUakpnxWQGbpywbBQ==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.0.tgz",
+ "integrity": "sha512-ulDZdc0Aj5uLc5nETsa7EPx2L7rM0YJM8r7ck7U73AXi7qOV44IHHRAYZHY6iU1rr3C5N4NtTmMRUJP6kwCWeA==",
"dependencies": {
- "@babel/template": "^7.23.9",
- "@babel/traverse": "^7.23.9",
- "@babel/types": "^7.23.9"
+ "@babel/template": "^7.24.0",
+ "@babel/traverse": "^7.24.0",
+ "@babel/types": "^7.24.0"
},
"engines": {
"node": ">=6.9.0"
@@ -830,9 +835,9 @@
}
},
"node_modules/@babel/parser": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.9.tgz",
- "integrity": "sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.0.tgz",
+ "integrity": "sha512-QuP/FxEAzMSjXygs8v4N9dvdXzEHN4W1oF3PxuWAtPo08UdM17u89RDMgjLn/mlc56iM0HlLmVkO/wgR+rDgHg==",
"bin": {
"parser": "bin/babel-parser.js"
},
@@ -1583,13 +1588,13 @@
}
},
"node_modules/@babel/plugin-transform-object-rest-spread": {
- "version": "7.23.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.23.4.tgz",
- "integrity": "sha512-9x9K1YyeQVw0iOXJlIzwm8ltobIIv7j2iLyP2jIhEbqPRQ7ScNgwQufU2I0Gq11VjyG4gI4yMXt2VFags+1N3g==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.0.tgz",
+ "integrity": "sha512-y/yKMm7buHpFFXfxVFS4Vk1ToRJDilIa6fKRioB9Vjichv58TDGXTvqV0dN7plobAmTW5eSEGXDngE+Mm+uO+w==",
"dependencies": {
- "@babel/compat-data": "^7.23.3",
- "@babel/helper-compilation-targets": "^7.22.15",
- "@babel/helper-plugin-utils": "^7.22.5",
+ "@babel/compat-data": "^7.23.5",
+ "@babel/helper-compilation-targets": "^7.23.6",
+ "@babel/helper-plugin-utils": "^7.24.0",
"@babel/plugin-syntax-object-rest-spread": "^7.8.3",
"@babel/plugin-transform-parameters": "^7.23.3"
},
@@ -1958,13 +1963,13 @@
}
},
"node_modules/@babel/preset-env": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.23.9.tgz",
- "integrity": "sha512-3kBGTNBBk9DQiPoXYS0g0BYlwTQYUTifqgKTjxUwEUkduRT2QOa0FPGBJ+NROQhGyYO5BuTJwGvBnqKDykac6A==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.0.tgz",
+ "integrity": "sha512-ZxPEzV9IgvGn73iK0E6VB9/95Nd7aMFpbE0l8KQFDG70cOV9IxRP7Y2FUPmlK0v6ImlLqYX50iuZ3ZTVhOF2lA==",
"dependencies": {
"@babel/compat-data": "^7.23.5",
"@babel/helper-compilation-targets": "^7.23.6",
- "@babel/helper-plugin-utils": "^7.22.5",
+ "@babel/helper-plugin-utils": "^7.24.0",
"@babel/helper-validator-option": "^7.23.5",
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.23.3",
"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.23.3",
@@ -2017,7 +2022,7 @@
"@babel/plugin-transform-new-target": "^7.23.3",
"@babel/plugin-transform-nullish-coalescing-operator": "^7.23.4",
"@babel/plugin-transform-numeric-separator": "^7.23.4",
- "@babel/plugin-transform-object-rest-spread": "^7.23.4",
+ "@babel/plugin-transform-object-rest-spread": "^7.24.0",
"@babel/plugin-transform-object-super": "^7.23.3",
"@babel/plugin-transform-optional-catch-binding": "^7.23.4",
"@babel/plugin-transform-optional-chaining": "^7.23.4",
@@ -2114,9 +2119,9 @@
"integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA=="
},
"node_modules/@babel/runtime": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz",
- "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.0.tgz",
+ "integrity": "sha512-Chk32uHMg6TnQdvw2e9IlqPpFX/6NLuK0Ys2PqLb7/gL5uFn9mXvK715FGLlOLQrcO4qIkNHkvPGktzzXexsFw==",
"dependencies": {
"regenerator-runtime": "^0.14.0"
},
@@ -2130,22 +2135,22 @@
"integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="
},
"node_modules/@babel/template": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.23.9.tgz",
- "integrity": "sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz",
+ "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==",
"dependencies": {
"@babel/code-frame": "^7.23.5",
- "@babel/parser": "^7.23.9",
- "@babel/types": "^7.23.9"
+ "@babel/parser": "^7.24.0",
+ "@babel/types": "^7.24.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.9.tgz",
- "integrity": "sha512-I/4UJ9vs90OkBtY6iiiTORVMyIhJ4kAVmsKo9KFc8UOxMeUfi2hvtIBsET5u9GizXE6/GFSuKCTNfgCswuEjRg==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.0.tgz",
+ "integrity": "sha512-HfuJlI8qq3dEDmNU5ChzzpZRWq+oxCZQyMzIMEqLho+AQnhMnKQUzH6ydo3RBl/YjPCuk68Y6s0Gx0AeyULiWw==",
"dependencies": {
"@babel/code-frame": "^7.23.5",
"@babel/generator": "^7.23.6",
@@ -2153,8 +2158,8 @@
"@babel/helper-function-name": "^7.23.0",
"@babel/helper-hoist-variables": "^7.22.5",
"@babel/helper-split-export-declaration": "^7.22.6",
- "@babel/parser": "^7.23.9",
- "@babel/types": "^7.23.9",
+ "@babel/parser": "^7.24.0",
+ "@babel/types": "^7.24.0",
"debug": "^4.3.1",
"globals": "^11.1.0"
},
@@ -2163,9 +2168,9 @@
}
},
"node_modules/@babel/types": {
- "version": "7.23.9",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.9.tgz",
- "integrity": "sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==",
+ "version": "7.24.0",
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.0.tgz",
+ "integrity": "sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==",
"dependencies": {
"@babel/helper-string-parser": "^7.23.4",
"@babel/helper-validator-identifier": "^7.22.20",
@@ -2278,9 +2283,9 @@
"integrity": "sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA=="
},
"node_modules/@emotion/react": {
- "version": "11.11.3",
- "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.11.3.tgz",
- "integrity": "sha512-Cnn0kuq4DoONOMcnoVsTOR8E+AdnKFf//6kUWc4LCdnxj31pZWn7rIULd6Y7/Js1PiPHzn7SKCM9vB/jBni8eA==",
+ "version": "11.11.4",
+ "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.11.4.tgz",
+ "integrity": "sha512-t8AjMlF0gHpvvxk5mAtCqR4vmxiGHCeJBaQO6gncUSdklELOgtwjerNY2yuJNfwnc6vi16U/+uMF+afIawJ9iw==",
"dependencies": {
"@babel/runtime": "^7.18.3",
"@emotion/babel-plugin": "^11.11.0",
@@ -2399,9 +2404,9 @@
}
},
"node_modules/@eslint/js": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz",
- "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==",
+ "version": "8.57.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz",
+ "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==",
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
@@ -2630,13 +2635,13 @@
}
},
"node_modules/@jridgewell/gen-mapping": {
- "version": "0.3.3",
- "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz",
- "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==",
+ "version": "0.3.5",
+ "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz",
+ "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==",
"dependencies": {
- "@jridgewell/set-array": "^1.0.1",
+ "@jridgewell/set-array": "^1.2.1",
"@jridgewell/sourcemap-codec": "^1.4.10",
- "@jridgewell/trace-mapping": "^0.3.9"
+ "@jridgewell/trace-mapping": "^0.3.24"
},
"engines": {
"node": ">=6.0.0"
@@ -2651,9 +2656,9 @@
}
},
"node_modules/@jridgewell/set-array": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
- "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
+ "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
"engines": {
"node": ">=6.0.0"
}
@@ -2664,23 +2669,23 @@
"integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg=="
},
"node_modules/@jridgewell/trace-mapping": {
- "version": "0.3.22",
- "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.22.tgz",
- "integrity": "sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw==",
+ "version": "0.3.25",
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
+ "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@next/env": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.0.tgz",
- "integrity": "sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw=="
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.1.tgz",
+ "integrity": "sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA=="
},
"node_modules/@next/eslint-plugin-next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.0.tgz",
- "integrity": "sha512-x4FavbNEeXx/baD/zC/SdrvkjSby8nBn8KcCREqk6UuwvwoAPZmaV8TFCAuo/cpovBRTIY67mHhe86MQQm/68Q==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.1.tgz",
+ "integrity": "sha512-NP1WoGFnFLpqqCWgGFjnn/sTwUExdPyjeFKRdQP1X/bL/tjAQ/TXDmYqw6vzGaP5NaZ2u6xzg+N/0nd7fOPOGQ==",
"dependencies": {
"glob": "10.3.10"
}
@@ -2729,9 +2734,9 @@
}
},
"node_modules/@next/swc-darwin-arm64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.0.tgz",
- "integrity": "sha512-nUDn7TOGcIeyQni6lZHfzNoo9S0euXnu0jhsbMOmMJUBfgsnESdjN97kM7cBqQxZa8L/bM9om/S5/1dzCrW6wQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz",
+ "integrity": "sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==",
"cpu": [
"arm64"
],
@@ -2744,9 +2749,9 @@
}
},
"node_modules/@next/swc-darwin-x64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.0.tgz",
- "integrity": "sha512-1jgudN5haWxiAl3O1ljUS2GfupPmcftu2RYJqZiMJmmbBT5M1XDffjUtRUzP4W3cBHsrvkfOFdQ71hAreNQP6g==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz",
+ "integrity": "sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==",
"cpu": [
"x64"
],
@@ -2759,9 +2764,9 @@
}
},
"node_modules/@next/swc-linux-arm64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.0.tgz",
- "integrity": "sha512-RHo7Tcj+jllXUbK7xk2NyIDod3YcCPDZxj1WLIYxd709BQ7WuRYl3OWUNG+WUfqeQBds6kvZYlc42NJJTNi4tQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz",
+ "integrity": "sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==",
"cpu": [
"arm64"
],
@@ -2774,9 +2779,9 @@
}
},
"node_modules/@next/swc-linux-arm64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.0.tgz",
- "integrity": "sha512-v6kP8sHYxjO8RwHmWMJSq7VZP2nYCkRVQ0qolh2l6xroe9QjbgV8siTbduED4u0hlk0+tjS6/Tuy4n5XCp+l6g==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz",
+ "integrity": "sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==",
"cpu": [
"arm64"
],
@@ -2789,9 +2794,9 @@
}
},
"node_modules/@next/swc-linux-x64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.0.tgz",
- "integrity": "sha512-zJ2pnoFYB1F4vmEVlb/eSe+VH679zT1VdXlZKX+pE66grOgjmKJHKacf82g/sWE4MQ4Rk2FMBCRnX+l6/TVYzQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz",
+ "integrity": "sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==",
"cpu": [
"x64"
],
@@ -2804,9 +2809,9 @@
}
},
"node_modules/@next/swc-linux-x64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.0.tgz",
- "integrity": "sha512-rbaIYFt2X9YZBSbH/CwGAjbBG2/MrACCVu2X0+kSykHzHnYH5FjHxwXLkcoJ10cX0aWCEynpu+rP76x0914atg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz",
+ "integrity": "sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==",
"cpu": [
"x64"
],
@@ -2819,9 +2824,9 @@
}
},
"node_modules/@next/swc-win32-arm64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.0.tgz",
- "integrity": "sha512-o1N5TsYc8f/HpGt39OUQpQ9AKIGApd3QLueu7hXk//2xq5Z9OxmV6sQfNp8C7qYmiOlHYODOGqNNa0e9jvchGQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz",
+ "integrity": "sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==",
"cpu": [
"arm64"
],
@@ -2834,9 +2839,9 @@
}
},
"node_modules/@next/swc-win32-ia32-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.0.tgz",
- "integrity": "sha512-XXIuB1DBRCFwNO6EEzCTMHT5pauwaSj4SWs7CYnME57eaReAKBXCnkUE80p/pAZcewm7hs+vGvNqDPacEXHVkw==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz",
+ "integrity": "sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==",
"cpu": [
"ia32"
],
@@ -2849,9 +2854,9 @@
}
},
"node_modules/@next/swc-win32-x64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.0.tgz",
- "integrity": "sha512-9WEbVRRAqJ3YFVqEZIxUqkiO8l1nool1LmNxygr5HWF8AcSYsEpneUDhmjUVJEzO2A04+oPtZdombzzPPkTtgg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz",
+ "integrity": "sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==",
"cpu": [
"x64"
],
@@ -3235,12 +3240,40 @@
"tslib": "^2.4.0"
}
},
+ "node_modules/@tailwindcss/typography": {
+ "version": "0.5.10",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.10.tgz",
+ "integrity": "sha512-Pe8BuPJQJd3FfRnm6H0ulKIGoMEQS+Vq01R6M5aCrFB/ccR/shT+0kXLjouGC1gFLm9hopTFN+DMP0pfwRWzPw==",
+ "dev": true,
+ "dependencies": {
+ "lodash.castarray": "^4.4.0",
+ "lodash.isplainobject": "^4.0.6",
+ "lodash.merge": "^4.6.2",
+ "postcss-selector-parser": "6.0.10"
+ },
+ "peerDependencies": {
+ "tailwindcss": ">=3.0.0 || insiders"
+ }
+ },
+ "node_modules/@tailwindcss/typography/node_modules/postcss-selector-parser": {
+ "version": "6.0.10",
+ "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz",
+ "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==",
+ "dev": true,
+ "dependencies": {
+ "cssesc": "^3.0.0",
+ "util-deprecate": "^1.0.2"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
"node_modules/@tanstack/react-table": {
- "version": "8.12.0",
- "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.12.0.tgz",
- "integrity": "sha512-LlEQ1Gpz4bfpiET+qmle4BhKDgKN3Y/sssc+O/wLqX8HRtjV+nhusYbllZlutZfMR8oeef83whKTj/VhaV8EeA==",
+ "version": "8.13.2",
+ "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.13.2.tgz",
+ "integrity": "sha512-b6mR3mYkjRtJ443QZh9sc7CvGTce81J35F/XMr0OoWbx0KIM7TTTdyNP2XKObvkLpYnLpCrYDwI3CZnLezWvpg==",
"dependencies": {
- "@tanstack/table-core": "8.12.0"
+ "@tanstack/table-core": "8.13.2"
},
"engines": {
"node": ">=12"
@@ -3255,11 +3288,11 @@
}
},
"node_modules/@tanstack/react-virtual": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.0.4.tgz",
- "integrity": "sha512-tiqKW/e2MJVCr7/pRUXulpkyxllaOclkHNfhKTo4pmHjJIqnhMfwIjc1Q1R0Un3PI3kQywywu/791c8z9u0qeA==",
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.1.3.tgz",
+ "integrity": "sha512-YCzcbF/Ws/uZ0q3Z6fagH+JVhx4JLvbSflgldMgLsuvB8aXjZLLb3HvrEVxY480F9wFlBiXlvQxOyXb5ENPrNA==",
"dependencies": {
- "@tanstack/virtual-core": "3.0.0"
+ "@tanstack/virtual-core": "3.1.3"
},
"funding": {
"type": "github",
@@ -3271,9 +3304,9 @@
}
},
"node_modules/@tanstack/table-core": {
- "version": "8.12.0",
- "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.12.0.tgz",
- "integrity": "sha512-cq/ylWVrOwixmwNXQjgZaQw1Izf7+nPxjczum7paAnMtwPg1S2qRAJU+Jb8rEBUWm69voC/zcChmePlk2hc6ug==",
+ "version": "8.13.2",
+ "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.13.2.tgz",
+ "integrity": "sha512-/2saD1lWBUV6/uNAwrsg2tw58uvMJ07bO2F1IWMxjFRkJiXKQRuc3Oq2aufeobD3873+4oIM/DRySIw7+QsPPw==",
"engines": {
"node": ">=12"
},
@@ -3283,18 +3316,18 @@
}
},
"node_modules/@tanstack/virtual-core": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.0.0.tgz",
- "integrity": "sha512-SYXOBTjJb05rXa2vl55TTwO40A6wKu0R5i1qQwhJYNDIqaIGF7D0HsLw+pJAyi2OvntlEIVusx3xtbbgSUi6zg==",
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.1.3.tgz",
+ "integrity": "sha512-Y5B4EYyv1j9V8LzeAoOVeTg0LI7Fo5InYKgAjkY1Pu9GjtUwX/EKxNcU7ng3sKr99WEf+bPTcktAeybyMOYo+g==",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
}
},
"node_modules/@tremor/react": {
- "version": "3.14.0",
- "resolved": "https://registry.npmjs.org/@tremor/react/-/react-3.14.0.tgz",
- "integrity": "sha512-bDIaId3js6S0LMhSypLN31l98t13XwPmhF6B1NIZUId/zZwnuE25z95VbKUL8NzHuCETIXSAp+Mm+OyA9EeAFw==",
+ "version": "3.14.1",
+ "resolved": "https://registry.npmjs.org/@tremor/react/-/react-3.14.1.tgz",
+ "integrity": "sha512-0LMxFIeBXsAaPnR6mXRK4fbZaTNLFfVngFpoOt+6Tf797k/c6yUkB48/QPB5vO02qzkV74D91hng9r6HwfDW5g==",
"dependencies": {
"@floating-ui/react": "^0.19.2",
"@headlessui/react": "^1.7.18",
@@ -3372,6 +3405,27 @@
"resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz",
"integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw=="
},
+ "node_modules/@types/debug": {
+ "version": "4.1.12",
+ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+ "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
+ "dependencies": {
+ "@types/ms": "*"
+ }
+ },
+ "node_modules/@types/estree": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
+ "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw=="
+ },
+ "node_modules/@types/estree-jsx": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz",
+ "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==",
+ "dependencies": {
+ "@types/estree": "*"
+ }
+ },
"node_modules/@types/hast": {
"version": "2.3.10",
"resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
@@ -3392,11 +3446,36 @@
"integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==",
"dev": true
},
+ "node_modules/@types/json-logic-js": {
+ "version": "2.0.7",
+ "resolved": "https://registry.npmjs.org/@types/json-logic-js/-/json-logic-js-2.0.7.tgz",
+ "integrity": "sha512-fucvZmbjqa1+gpw/nIwcP+ZIYHTvmwxuQQFKw/yU7+ZSD63z/xgY5pWN7sYUDRzg2Wf9STapL+7c66FNzhU6+Q==",
+ "dev": true
+ },
+ "node_modules/@types/json-query": {
+ "version": "2.2.6",
+ "resolved": "https://registry.npmjs.org/@types/json-query/-/json-query-2.2.6.tgz",
+ "integrity": "sha512-zP6rRewNus7jEj04RjxLnseONCNz46bo/H1Nj7qfNHyFaH0HPiB3rMqBgxYw8sTTG3fRDnA1uNBCTRNQJ8UUOA==",
+ "dev": true
+ },
"node_modules/@types/json5": {
"version": "0.0.29",
"resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz",
"integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ=="
},
+ "node_modules/@types/mdast": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.3.tgz",
+ "integrity": "sha512-LsjtqsyF+d2/yFOYaN22dHZI1Cpwkrj+g06G8+qtUKlhovPW89YhqSnfKtMbkgmEtYpH2gydRNULd6y8mciAFg==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/@types/ms": {
+ "version": "0.7.34",
+ "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz",
+ "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g=="
+ },
"node_modules/@types/node": {
"version": "20.2.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.2.1.tgz",
@@ -3429,9 +3508,9 @@
}
},
"node_modules/@types/react": {
- "version": "18.2.55",
- "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.55.tgz",
- "integrity": "sha512-Y2Tz5P4yz23brwm2d7jNon39qoAtMMmalOQv6+fEFt1mT+FcM3D841wDpoUvFXhaYenuROCy3FZYqdTjM7qVyA==",
+ "version": "18.2.61",
+ "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.61.tgz",
+ "integrity": "sha512-NURTN0qNnJa7O/k4XUkEW2yfygA+NxS0V5h1+kp9jPwhzZy95q3ADoGMP0+JypMhrZBTTgjKAUlTctde1zzeQA==",
"dependencies": {
"@types/prop-types": "*",
"@types/scheduler": "*",
@@ -3439,9 +3518,9 @@
}
},
"node_modules/@types/react-datepicker": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/@types/react-datepicker/-/react-datepicker-6.0.1.tgz",
- "integrity": "sha512-oAavACLXz+Vm6kO1XZe1/LQJClOO32UUv4xva9G16KQJ2hNROtXyeGzmRg6mktHrQ+YGLnnNlN6S/XykQE2HMA==",
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/@types/react-datepicker/-/react-datepicker-6.0.2.tgz",
+ "integrity": "sha512-RnBAD9hO9GgSNZ2WjUZKltP3OkLxHNxmvFHyp8SC5A5qItPH20VWj/4krJ3iqGUiH1pqV/vRTOQKeJfEOqcXSw==",
"dev": true,
"dependencies": {
"@floating-ui/react": "^0.26.2",
@@ -3907,9 +3986,9 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/autoprefixer": {
- "version": "10.4.17",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz",
- "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==",
+ "version": "10.4.18",
+ "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.18.tgz",
+ "integrity": "sha512-1DKbDfsr6KUElM6wg+0zRNkB/Q7WcKYAaK+pzXn+Xqmszm/5Xa9coeNdtP88Vi+dPzZnMjhge8GIV49ZQkDa+g==",
"funding": [
{
"type": "opencollective",
@@ -3925,8 +4004,8 @@
}
],
"dependencies": {
- "browserslist": "^4.22.2",
- "caniuse-lite": "^1.0.30001578",
+ "browserslist": "^4.23.0",
+ "caniuse-lite": "^1.0.30001591",
"fraction.js": "^4.3.7",
"normalize-range": "^0.1.2",
"picocolors": "^1.0.0",
@@ -3943,9 +4022,12 @@
}
},
"node_modules/available-typed-arrays": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.6.tgz",
- "integrity": "sha512-j1QzY8iPNPG4o4xmO3ptzpRxTciqD3MgEHtifP/YnJpIo58Xu+ne4BejlbkuaLfXn/nz6HFiw29bLpj2PNMdGg==",
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
+ "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
+ "dependencies": {
+ "possible-typed-array-names": "^1.0.0"
+ },
"engines": {
"node": ">= 0.4"
},
@@ -4042,21 +4124,30 @@
"@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0"
}
},
+ "node_modules/bail": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz",
+ "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/bare-events": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.2.0.tgz",
- "integrity": "sha512-Yyyqff4PIFfSuthCZqLlPISTWHmnQxoPuAvkmgzsJEmG3CesdIv6Xweayl0JkCZJSB2yYIdJyEz97tpxNhgjbg==",
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.2.1.tgz",
+ "integrity": "sha512-9GYPpsPFvrWBkelIhOhTWtkeZxVxZOdb3VnFTCzlOo3OjvmTvzLoZFUT8kNFACx0vJej6QPney1Cf9BvzCNE/A==",
"optional": true
},
"node_modules/bare-fs": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-2.1.5.tgz",
- "integrity": "sha512-5t0nlecX+N2uJqdxe9d18A98cp2u9BETelbjKpiVgQqzzmVNFYWEAjQHqS+2Khgto1vcwhik9cXucaj5ve2WWA==",
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-2.2.1.tgz",
+ "integrity": "sha512-+CjmZANQDFZWy4PGbVdmALIwmt33aJg8qTkVjClU6X4WmZkTPBDxRHiBn7fpqEWEfF3AC2io++erpViAIQbSjg==",
"optional": true,
"dependencies": {
"bare-events": "^2.0.0",
@@ -4294,9 +4385,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001587",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001587.tgz",
- "integrity": "sha512-HMFNotUmLXn71BQxg8cijvqxnIAofforZOwGsxyXJ0qugTdspUF4sPSJ2vhgprHCB996tIDzEq1ubumPDV8ULA==",
+ "version": "1.0.30001591",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001591.tgz",
+ "integrity": "sha512-PCzRMei/vXjJyL5mJtzNiUCKP59dm8Apqc3PH8gJkMnMXZGox93RbE76jHsmLwmIo6/3nsYIpJtx0O7u5PqFuQ==",
"funding": [
{
"type": "opencollective",
@@ -4312,6 +4403,15 @@
}
]
},
+ "node_modules/ccount": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
+ "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
@@ -4385,6 +4485,15 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/character-entities-html4": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz",
+ "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/character-entities-legacy": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz",
@@ -4862,6 +4971,27 @@
"resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz",
"integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg=="
},
+ "node_modules/decode-named-character-reference": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz",
+ "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==",
+ "dependencies": {
+ "character-entities": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/decode-named-character-reference/node_modules/character-entities": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
+ "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/decompress-response": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
@@ -5027,6 +5157,18 @@
"node": ">=8"
}
},
+ "node_modules/devlop": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz",
+ "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==",
+ "dependencies": {
+ "dequal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/didyoumean": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
@@ -5134,9 +5276,9 @@
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
},
"node_modules/electron-to-chromium": {
- "version": "1.4.670",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.670.tgz",
- "integrity": "sha512-hcijYOWjOtjKrKPtNA6tuLlA/bTLO3heFG8pQA6mLpq7dRydSWicXova5lyxDzp1iVJaYhK7J2OQlGE52KYn7A=="
+ "version": "1.4.690",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.690.tgz",
+ "integrity": "sha512-+2OAGjUx68xElQhydpcbqH50hE8Vs2K6TkAeLhICYfndb67CVH0UsZaijmRUE3rHlIxU1u0jxwhgVe6fK3YANA=="
},
"node_modules/emoji-regex": {
"version": "9.2.2",
@@ -5152,9 +5294,9 @@
}
},
"node_modules/enhanced-resolve": {
- "version": "5.15.0",
- "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz",
- "integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==",
+ "version": "5.15.1",
+ "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.1.tgz",
+ "integrity": "sha512-3d3JRbwsCLJsYgvb6NuWEG44jjPSOMuS73L/6+7BZuoKm3W+qXnSoIYVHi8dG7Qcg4inAY4jbzkZ7MnskePeDg==",
"dependencies": {
"graceful-fs": "^4.2.4",
"tapable": "^2.2.0"
@@ -5183,17 +5325,17 @@
}
},
"node_modules/es-abstract": {
- "version": "1.22.4",
- "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.4.tgz",
- "integrity": "sha512-vZYJlk2u6qHYxBOTjAeg7qUxHdNfih64Uu2J8QqWgXZ2cri0ZpJAkzDUK/q593+mvKwlxyaxr6F1Q+3LKoQRgg==",
+ "version": "1.22.5",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.5.tgz",
+ "integrity": "sha512-oW69R+4q2wG+Hc3KZePPZxOiisRIqfKBVo/HLx94QcJeWGU/8sZhCvc829rd1kS366vlJbzBfXf9yWwf0+Ko7w==",
"dependencies": {
"array-buffer-byte-length": "^1.0.1",
"arraybuffer.prototype.slice": "^1.0.3",
- "available-typed-arrays": "^1.0.6",
+ "available-typed-arrays": "^1.0.7",
"call-bind": "^1.0.7",
"es-define-property": "^1.0.0",
"es-errors": "^1.3.0",
- "es-set-tostringtag": "^2.0.2",
+ "es-set-tostringtag": "^2.0.3",
"es-to-primitive": "^1.2.1",
"function.prototype.name": "^1.1.6",
"get-intrinsic": "^1.2.4",
@@ -5201,15 +5343,15 @@
"globalthis": "^1.0.3",
"gopd": "^1.0.1",
"has-property-descriptors": "^1.0.2",
- "has-proto": "^1.0.1",
+ "has-proto": "^1.0.3",
"has-symbols": "^1.0.3",
"hasown": "^2.0.1",
"internal-slot": "^1.0.7",
"is-array-buffer": "^3.0.4",
"is-callable": "^1.2.7",
- "is-negative-zero": "^2.0.2",
+ "is-negative-zero": "^2.0.3",
"is-regex": "^1.1.4",
- "is-shared-array-buffer": "^1.0.2",
+ "is-shared-array-buffer": "^1.0.3",
"is-string": "^1.0.7",
"is-typed-array": "^1.1.13",
"is-weakref": "^1.0.2",
@@ -5222,10 +5364,10 @@
"string.prototype.trim": "^1.2.8",
"string.prototype.trimend": "^1.0.7",
"string.prototype.trimstart": "^1.0.7",
- "typed-array-buffer": "^1.0.1",
- "typed-array-byte-length": "^1.0.0",
- "typed-array-byte-offset": "^1.0.0",
- "typed-array-length": "^1.0.4",
+ "typed-array-buffer": "^1.0.2",
+ "typed-array-byte-length": "^1.0.1",
+ "typed-array-byte-offset": "^1.0.2",
+ "typed-array-length": "^1.0.5",
"unbox-primitive": "^1.0.2",
"which-typed-array": "^1.1.14"
},
@@ -5305,13 +5447,13 @@
}
},
"node_modules/es-set-tostringtag": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz",
- "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==",
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz",
+ "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==",
"dependencies": {
- "get-intrinsic": "^1.2.2",
- "has-tostringtag": "^1.0.0",
- "hasown": "^2.0.0"
+ "get-intrinsic": "^1.2.4",
+ "has-tostringtag": "^1.0.2",
+ "hasown": "^2.0.1"
},
"engines": {
"node": ">= 0.4"
@@ -5361,15 +5503,15 @@
}
},
"node_modules/eslint": {
- "version": "8.56.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz",
- "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==",
+ "version": "8.57.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz",
+ "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
"@eslint/eslintrc": "^2.1.4",
- "@eslint/js": "8.56.0",
- "@humanwhocodes/config-array": "^0.11.13",
+ "@eslint/js": "8.57.0",
+ "@humanwhocodes/config-array": "^0.11.14",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"@ungap/structured-clone": "^1.2.0",
@@ -5415,11 +5557,11 @@
}
},
"node_modules/eslint-config-next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.1.0.tgz",
- "integrity": "sha512-SBX2ed7DoRFXC6CQSLc/SbLY9Ut6HxNB2wPTcoIWjUMd7aF7O/SIE7111L8FdZ9TXsNV4pulUDnfthpyPtbFUg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.1.1.tgz",
+ "integrity": "sha512-OLyw2oHzwE0M0EODGYMbjksDQKSshQWBzYY+Nkoxoe3+Q5G0lpb9EkekyDk7Foz9BMfotbYShJrgYoBEAVqU4Q==",
"dependencies": {
- "@next/eslint-plugin-next": "14.1.0",
+ "@next/eslint-plugin-next": "14.1.1",
"@rushstack/eslint-patch": "^1.3.3",
"@typescript-eslint/parser": "^5.4.2 || ^6.0.0",
"eslint-import-resolver-node": "^0.3.6",
@@ -5482,9 +5624,9 @@
}
},
"node_modules/eslint-module-utils": {
- "version": "2.8.0",
- "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz",
- "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==",
+ "version": "2.8.1",
+ "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz",
+ "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==",
"dependencies": {
"debug": "^3.2.7"
},
@@ -5805,6 +5947,15 @@
"node": ">=4.0"
}
},
+ "node_modules/estree-util-is-identifier-name": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz",
+ "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/esutils": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
@@ -5998,14 +6149,14 @@
}
},
"node_modules/flatted": {
- "version": "3.2.9",
- "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz",
- "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ=="
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
+ "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw=="
},
"node_modules/follow-redirects": {
- "version": "1.15.5",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
- "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
+ "version": "1.15.6",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
+ "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
@@ -6341,9 +6492,9 @@
}
},
"node_modules/has-proto": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz",
- "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==",
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
+ "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
"engines": {
"node": ">= 0.4"
},
@@ -6396,6 +6547,92 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/hast-util-to-jsx-runtime": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.0.tgz",
+ "integrity": "sha512-H/y0+IWPdsLLS738P8tDnrQ8Z+dj12zQQ6WC11TIM21C8WFVoIxcqWXf2H3hiTVZjF1AWqoimGwrTWecWrnmRQ==",
+ "dependencies": {
+ "@types/estree": "^1.0.0",
+ "@types/hast": "^3.0.0",
+ "@types/unist": "^3.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "devlop": "^1.0.0",
+ "estree-util-is-identifier-name": "^3.0.0",
+ "hast-util-whitespace": "^3.0.0",
+ "mdast-util-mdx-expression": "^2.0.0",
+ "mdast-util-mdx-jsx": "^3.0.0",
+ "mdast-util-mdxjs-esm": "^2.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0",
+ "style-to-object": "^1.0.0",
+ "unist-util-position": "^5.0.0",
+ "vfile-message": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-to-jsx-runtime/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/hast-util-to-jsx-runtime/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/hast-util-to-jsx-runtime/node_modules/comma-separated-tokens": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz",
+ "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/hast-util-to-jsx-runtime/node_modules/property-information": {
+ "version": "6.4.1",
+ "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.4.1.tgz",
+ "integrity": "sha512-OHYtXfu5aI2sS2LWFSN5rgJjrQ4pCy8i1jubJLe2QvMF8JJ++HXTUIVWFLfXJoaOfvYYjk2SN8J2wFUWIGXT4w==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/hast-util-to-jsx-runtime/node_modules/space-separated-tokens": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz",
+ "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/hast-util-whitespace": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz",
+ "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==",
+ "dependencies": {
+ "@types/hast": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-whitespace/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
"node_modules/hastscript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz",
@@ -6428,6 +6665,15 @@
"react-is": "^16.7.0"
}
},
+ "node_modules/html-url-attributes": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.0.tgz",
+ "integrity": "sha512-/sXbVCWayk6GDVg3ctOX6nxaVj7So40FcFAnWlWGNAB1LpYKcV5Cd10APjPjW80O7zYW2MsjBV4zZ7IZO5fVow==",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/human-signals": {
"version": "4.3.1",
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz",
@@ -6519,6 +6765,11 @@
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
},
+ "node_modules/inline-style-parser": {
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.2.tgz",
+ "integrity": "sha512-EcKzdTHVe8wFVOGEYXiW9WmJXPjqi1T+234YpJr98RiFYKHV3cdy1+3mkTE+KHTHxFFLH51SfaGOoUdW+v7ViQ=="
+ },
"node_modules/internal-slot": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz",
@@ -6794,9 +7045,9 @@
}
},
"node_modules/is-negative-zero": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz",
- "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==",
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz",
+ "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==",
"engines": {
"node": ">= 0.4"
},
@@ -6834,6 +7085,17 @@
"node": ">=8"
}
},
+ "node_modules/is-plain-obj": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz",
+ "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/is-regex": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz",
@@ -6858,11 +7120,14 @@
}
},
"node_modules/is-shared-array-buffer": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz",
- "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==",
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz",
+ "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==",
"dependencies": {
- "call-bind": "^1.0.2"
+ "call-bind": "^1.0.7"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -7025,9 +7290,9 @@
}
},
"node_modules/jose": {
- "version": "4.15.4",
- "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.4.tgz",
- "integrity": "sha512-W+oqK4H+r5sITxfxpSU+MMdr/YSWGvgZMQDIsNoBDGGy4i7GBPTtvFKibQzW06n3U3TqHjhvBJsirShsEJ6eeQ==",
+ "version": "4.15.5",
+ "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz",
+ "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==",
"funding": {
"url": "https://github.com/sponsors/panva"
}
@@ -7210,16 +7475,37 @@
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
+ "node_modules/lodash.castarray": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz",
+ "integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==",
+ "dev": true
+ },
"node_modules/lodash.debounce": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
"integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow=="
},
+ "node_modules/lodash.isplainobject": {
+ "version": "4.0.6",
+ "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
+ "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
+ "dev": true
+ },
"node_modules/lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
+ "node_modules/longest-streak": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
+ "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -7263,49 +7549,997 @@
"node": ">=10"
}
},
- "node_modules/mdn-data": {
- "version": "2.0.30",
- "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
- "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="
- },
- "node_modules/memoize-one": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-6.0.0.tgz",
- "integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw=="
- },
- "node_modules/merge-stream": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
- "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
- "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
- "engines": {
- "node": ">= 8"
+ "node_modules/markdown-table": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz",
+ "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
}
},
- "node_modules/micromatch": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
- "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+ "node_modules/mdast-util-find-and-replace": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz",
+ "integrity": "sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==",
"dependencies": {
- "braces": "^3.0.2",
- "picomatch": "^2.3.1"
+ "@types/mdast": "^4.0.0",
+ "escape-string-regexp": "^5.0.0",
+ "unist-util-is": "^6.0.0",
+ "unist-util-visit-parents": "^6.0.0"
},
- "engines": {
- "node": ">=8.6"
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
}
},
- "node_modules/mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+ "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
"engines": {
- "node": ">= 0.6"
- }
- },
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/mdast-util-from-markdown": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.0.tgz",
+ "integrity": "sha512-n7MTOr/z+8NAX/wmhhDji8O3bRvPTV/U0oTCaZJkjhPSKTPhS3xufVhKGF8s1pJ7Ox4QgoIU7KHseh09S+9rTA==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "@types/unist": "^3.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-to-string": "^4.0.0",
+ "micromark": "^4.0.0",
+ "micromark-util-decode-numeric-character-reference": "^2.0.0",
+ "micromark-util-decode-string": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0",
+ "unist-util-stringify-position": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-from-markdown/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/mdast-util-gfm": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz",
+ "integrity": "sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==",
+ "dependencies": {
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-gfm-autolink-literal": "^2.0.0",
+ "mdast-util-gfm-footnote": "^2.0.0",
+ "mdast-util-gfm-strikethrough": "^2.0.0",
+ "mdast-util-gfm-table": "^2.0.0",
+ "mdast-util-gfm-task-list-item": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-autolink-literal": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.0.tgz",
+ "integrity": "sha512-FyzMsduZZHSc3i0Px3PQcBT4WJY/X/RCtEJKuybiC6sjPqLv7h1yqAkmILZtuxMSsUyaLUWNp71+vQH2zqp5cg==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "ccount": "^2.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-find-and-replace": "^3.0.0",
+ "micromark-util-character": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-footnote": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz",
+ "integrity": "sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.1.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-strikethrough": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz",
+ "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-table": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz",
+ "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "markdown-table": "^3.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-task-list-item": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz",
+ "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-mdx-expression": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz",
+ "integrity": "sha512-fGCu8eWdKUKNu5mohVGkhBXCXGnOTLuFqOvGMvdikr+J1w7lDJgxThOKpwRWzzbyXAU2hhSwsmssOY4yTokluw==",
+ "dependencies": {
+ "@types/estree-jsx": "^1.0.0",
+ "@types/hast": "^3.0.0",
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-mdx-expression/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.0.tgz",
+ "integrity": "sha512-A8AJHlR7/wPQ3+Jre1+1rq040fX9A4Q1jG8JxmSNp/PLPHg80A6475wxTp3KzHpApFH6yWxFotHrJQA3dXP6/w==",
+ "dependencies": {
+ "@types/estree-jsx": "^1.0.0",
+ "@types/hast": "^3.0.0",
+ "@types/mdast": "^4.0.0",
+ "@types/unist": "^3.0.0",
+ "ccount": "^2.0.0",
+ "devlop": "^1.1.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0",
+ "parse-entities": "^4.0.0",
+ "stringify-entities": "^4.0.0",
+ "unist-util-remove-position": "^5.0.0",
+ "unist-util-stringify-position": "^4.0.0",
+ "vfile-message": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/character-entities": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
+ "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/character-entities-legacy": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
+ "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/character-reference-invalid": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz",
+ "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/is-alphabetical": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz",
+ "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/is-alphanumerical": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz",
+ "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==",
+ "dependencies": {
+ "is-alphabetical": "^2.0.0",
+ "is-decimal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/is-decimal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz",
+ "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/is-hexadecimal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz",
+ "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/parse-entities": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz",
+ "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==",
+ "dependencies": {
+ "@types/unist": "^2.0.0",
+ "character-entities": "^2.0.0",
+ "character-entities-legacy": "^3.0.0",
+ "character-reference-invalid": "^2.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "is-alphanumerical": "^2.0.0",
+ "is-decimal": "^2.0.0",
+ "is-hexadecimal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/mdast-util-mdx-jsx/node_modules/parse-entities/node_modules/@types/unist": {
+ "version": "2.0.10",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz",
+ "integrity": "sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA=="
+ },
+ "node_modules/mdast-util-mdxjs-esm": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz",
+ "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==",
+ "dependencies": {
+ "@types/estree-jsx": "^1.0.0",
+ "@types/hast": "^3.0.0",
+ "@types/mdast": "^4.0.0",
+ "devlop": "^1.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "mdast-util-to-markdown": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-mdxjs-esm/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/mdast-util-phrasing": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
+ "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "unist-util-is": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-to-hast": {
+ "version": "13.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.1.0.tgz",
+ "integrity": "sha512-/e2l/6+OdGp/FB+ctrJ9Avz71AN/GRH3oi/3KAx/kMnoUsD6q0woXlDT8lLEeViVKE7oZxE7RXzvO3T8kF2/sA==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "@types/mdast": "^4.0.0",
+ "@ungap/structured-clone": "^1.0.0",
+ "devlop": "^1.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "trim-lines": "^3.0.0",
+ "unist-util-position": "^5.0.0",
+ "unist-util-visit": "^5.0.0",
+ "vfile": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-to-hast/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/mdast-util-to-markdown": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz",
+ "integrity": "sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "@types/unist": "^3.0.0",
+ "longest-streak": "^3.0.0",
+ "mdast-util-phrasing": "^4.0.0",
+ "mdast-util-to-string": "^4.0.0",
+ "micromark-util-decode-string": "^2.0.0",
+ "unist-util-visit": "^5.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-to-markdown/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/mdast-util-to-string": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+ "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdn-data": {
+ "version": "2.0.30",
+ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
+ "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="
+ },
+ "node_modules/memoize-one": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-6.0.0.tgz",
+ "integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw=="
+ },
+ "node_modules/merge-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
+ "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="
+ },
+ "node_modules/merge2": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
+ "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/micromark": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.0.tgz",
+ "integrity": "sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "@types/debug": "^4.0.0",
+ "debug": "^4.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "devlop": "^1.0.0",
+ "micromark-core-commonmark": "^2.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-combine-extensions": "^2.0.0",
+ "micromark-util-decode-numeric-character-reference": "^2.0.0",
+ "micromark-util-encode": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-resolve-all": "^2.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "micromark-util-subtokenize": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-core-commonmark": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.0.tgz",
+ "integrity": "sha512-jThOz/pVmAYUtkroV3D5c1osFXAMv9e0ypGDOIZuCeAe91/sD6BoE2Sjzt30yuXtwOYUmySOhMas/PVyh02itA==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "decode-named-character-reference": "^1.0.0",
+ "devlop": "^1.0.0",
+ "micromark-factory-destination": "^2.0.0",
+ "micromark-factory-label": "^2.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-factory-title": "^2.0.0",
+ "micromark-factory-whitespace": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-classify-character": "^2.0.0",
+ "micromark-util-html-tag-name": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-resolve-all": "^2.0.0",
+ "micromark-util-subtokenize": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-extension-gfm": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz",
+ "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==",
+ "dependencies": {
+ "micromark-extension-gfm-autolink-literal": "^2.0.0",
+ "micromark-extension-gfm-footnote": "^2.0.0",
+ "micromark-extension-gfm-strikethrough": "^2.0.0",
+ "micromark-extension-gfm-table": "^2.0.0",
+ "micromark-extension-gfm-tagfilter": "^2.0.0",
+ "micromark-extension-gfm-task-list-item": "^2.0.0",
+ "micromark-util-combine-extensions": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-autolink-literal": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.0.0.tgz",
+ "integrity": "sha512-rTHfnpt/Q7dEAK1Y5ii0W8bhfJlVJFnJMHIPisfPK3gpVNuOP0VnRl96+YJ3RYWV/P4gFeQoGKNlT3RhuvpqAg==",
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-footnote": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.0.0.tgz",
+ "integrity": "sha512-6Rzu0CYRKDv3BfLAUnZsSlzx3ak6HAoI85KTiijuKIz5UxZxbUI+pD6oHgw+6UtQuiRwnGRhzMmPRv4smcz0fg==",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-core-commonmark": "^2.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-normalize-identifier": "^2.0.0",
+ "micromark-util-sanitize-uri": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-strikethrough": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.0.0.tgz",
+ "integrity": "sha512-c3BR1ClMp5fxxmwP6AoOY2fXO9U8uFMKs4ADD66ahLTNcwzSCyRVU4k7LPV5Nxo/VJiR4TdzxRQY2v3qIUceCw==",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-classify-character": "^2.0.0",
+ "micromark-util-resolve-all": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-table": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.0.0.tgz",
+ "integrity": "sha512-PoHlhypg1ItIucOaHmKE8fbin3vTLpDOUg8KAr8gRCF1MOZI9Nquq2i/44wFvviM4WuxJzc3demT8Y3dkfvYrw==",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-tagfilter": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz",
+ "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==",
+ "dependencies": {
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-task-list-item": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.0.1.tgz",
+ "integrity": "sha512-cY5PzGcnULaN5O7T+cOzfMoHjBW7j+T9D2sucA5d/KbsBTPcYdebm9zUd9zzdgJGCwahV+/W78Z3nbulBYVbTw==",
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-factory-destination": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz",
+ "integrity": "sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-label": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz",
+ "integrity": "sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-space": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz",
+ "integrity": "sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-title": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz",
+ "integrity": "sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-factory-whitespace": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz",
+ "integrity": "sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-factory-space": "^2.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-character": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz",
+ "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-chunked": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz",
+ "integrity": "sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-classify-character": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz",
+ "integrity": "sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-combine-extensions": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz",
+ "integrity": "sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-decode-numeric-character-reference": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz",
+ "integrity": "sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-decode-string": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.0.tgz",
+ "integrity": "sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "decode-named-character-reference": "^1.0.0",
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-decode-numeric-character-reference": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-encode": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz",
+ "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ]
+ },
+ "node_modules/micromark-util-html-tag-name": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz",
+ "integrity": "sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ]
+ },
+ "node_modules/micromark-util-normalize-identifier": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz",
+ "integrity": "sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-resolve-all": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz",
+ "integrity": "sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-sanitize-uri": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz",
+ "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-character": "^2.0.0",
+ "micromark-util-encode": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-subtokenize": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.0.tgz",
+ "integrity": "sha512-vc93L1t+gpR3p8jxeVdaYlbV2jTYteDje19rNSS/H5dlhxUYll5Fy6vJ2cDwP8RnsXi818yGty1ayP55y3W6fg==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "devlop": "^1.0.0",
+ "micromark-util-chunked": "^2.0.0",
+ "micromark-util-symbol": "^2.0.0",
+ "micromark-util-types": "^2.0.0"
+ }
+ },
+ "node_modules/micromark-util-symbol": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz",
+ "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ]
+ },
+ "node_modules/micromark-util-types": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz",
+ "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ]
+ },
+ "node_modules/micromatch": {
+ "version": "4.0.5",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
+ "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+ "dependencies": {
+ "braces": "^3.0.2",
+ "picomatch": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=8.6"
+ }
+ },
+ "node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
@@ -7422,11 +8656,11 @@
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="
},
"node_modules/next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/next/-/next-14.1.0.tgz",
- "integrity": "sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/next/-/next-14.1.1.tgz",
+ "integrity": "sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==",
"dependencies": {
- "@next/env": "14.1.0",
+ "@next/env": "14.1.1",
"@swc/helpers": "0.5.2",
"busboy": "1.6.0",
"caniuse-lite": "^1.0.30001579",
@@ -7441,15 +8675,15 @@
"node": ">=18.17.0"
},
"optionalDependencies": {
- "@next/swc-darwin-arm64": "14.1.0",
- "@next/swc-darwin-x64": "14.1.0",
- "@next/swc-linux-arm64-gnu": "14.1.0",
- "@next/swc-linux-arm64-musl": "14.1.0",
- "@next/swc-linux-x64-gnu": "14.1.0",
- "@next/swc-linux-x64-musl": "14.1.0",
- "@next/swc-win32-arm64-msvc": "14.1.0",
- "@next/swc-win32-ia32-msvc": "14.1.0",
- "@next/swc-win32-x64-msvc": "14.1.0"
+ "@next/swc-darwin-arm64": "14.1.1",
+ "@next/swc-darwin-x64": "14.1.1",
+ "@next/swc-linux-arm64-gnu": "14.1.1",
+ "@next/swc-linux-arm64-musl": "14.1.1",
+ "@next/swc-linux-x64-gnu": "14.1.1",
+ "@next/swc-linux-x64-musl": "14.1.1",
+ "@next/swc-win32-arm64-msvc": "14.1.1",
+ "@next/swc-win32-ia32-msvc": "14.1.1",
+ "@next/swc-win32-x64-msvc": "14.1.1"
},
"peerDependencies": {
"@opentelemetry/api": "^1.1.0",
@@ -7530,9 +8764,9 @@
}
},
"node_modules/node-abi": {
- "version": "3.54.0",
- "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.54.0.tgz",
- "integrity": "sha512-p7eGEiQil0YUV3ItH4/tBb781L5impVmmx2E9FRKF7d18XXzp4PGT2tdYMFY6wQqgxD0IwNZOiSJ0/K0fSi/OA==",
+ "version": "3.56.0",
+ "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.56.0.tgz",
+ "integrity": "sha512-fZjdhDOeRcaS+rcpve7XuwHBmktS1nS1gzgghwKUQQ8nTy2FdSDr6ZT8k6YhvlJeHmmQMYiT/IH9hfco5zeW2Q==",
"dependencies": {
"semver": "^7.3.5"
},
@@ -7567,9 +8801,9 @@
}
},
"node_modules/npm-run-path": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.2.0.tgz",
- "integrity": "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg==",
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz",
+ "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==",
"dependencies": {
"path-key": "^4.0.0"
},
@@ -7632,12 +8866,12 @@
}
},
"node_modules/object-is": {
- "version": "1.1.5",
- "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz",
- "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==",
+ "version": "1.1.6",
+ "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.6.tgz",
+ "integrity": "sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q==",
"dependencies": {
- "call-bind": "^1.0.2",
- "define-properties": "^1.1.3"
+ "call-bind": "^1.0.7",
+ "define-properties": "^1.2.1"
},
"engines": {
"node": ">= 0.4"
@@ -7975,6 +9209,14 @@
"node": ">= 6"
}
},
+ "node_modules/possible-typed-array-names": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz",
+ "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/postcss": {
"version": "8.4.35",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz",
@@ -8071,9 +9313,9 @@
}
},
"node_modules/postcss-load-config/node_modules/lilconfig": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.0.tgz",
- "integrity": "sha512-p3cz0JV5vw/XeouBU3Ldnp+ZkBjE+n8ydJ4mcwBrOiXXPqNlrzGBqWs9X4MWF7f+iKUBu794Y8Hh8yawiJbCjw==",
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz",
+ "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==",
"engines": {
"node": ">=14"
},
@@ -8117,9 +9359,9 @@
"integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="
},
"node_modules/posthog-js": {
- "version": "1.106.0",
- "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.106.0.tgz",
- "integrity": "sha512-TjzyP/pbmttDJk8M7298LKWyXa5LQLJbc+OFvNJhznbUtSs+CW+k8vr5fXYjj2nF3biigG/TJgR0M3C7AgeXzQ==",
+ "version": "1.116.2",
+ "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.116.2.tgz",
+ "integrity": "sha512-s76+xH91mLt/J05sy7XV4/BzfwTJFU+BoBcv9adYOjD3SAUjUFg21+G2rq9A1rRvMHh0lms4t16TRMlO4L463A==",
"dependencies": {
"fflate": "^0.4.8",
"preact": "^10.19.3"
@@ -8138,9 +9380,9 @@
}
},
"node_modules/preact": {
- "version": "10.19.4",
- "resolved": "https://registry.npmjs.org/preact/-/preact-10.19.4.tgz",
- "integrity": "sha512-dwaX5jAh0Ga8uENBX1hSOujmKWgx9RtL80KaKUFLc6jb4vCEAc3EeZ0rnQO/FO4VgjfPMfoLFWnNG8bHuZ9VLw==",
+ "version": "10.19.6",
+ "resolved": "https://registry.npmjs.org/preact/-/preact-10.19.6.tgz",
+ "integrity": "sha512-gympg+T2Z1fG1unB8NH29yHJwnEaCH37Z32diPDku316OTnRPeMbiRV9kTrfZpocXjdfnWuFUl/Mj4BHaf6gnw==",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/preact"
@@ -8158,9 +9400,9 @@
}
},
"node_modules/prebuild-install": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz",
- "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==",
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.2.tgz",
+ "integrity": "sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==",
"dependencies": {
"detect-libc": "^2.0.0",
"expand-template": "^2.0.3",
@@ -8439,9 +9681,9 @@
}
},
"node_modules/react-datepicker": {
- "version": "6.1.0",
- "resolved": "https://registry.npmjs.org/react-datepicker/-/react-datepicker-6.1.0.tgz",
- "integrity": "sha512-8uz+hAOpvHqZGvD4Ky1hJ0/tLI4S9B0Gu9LV7LtLxRKXODs/xrxEay0aMVp7AW9iizTeImZh/6aA00fFaRZpJw==",
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/react-datepicker/-/react-datepicker-6.2.0.tgz",
+ "integrity": "sha512-GzEOiE6yLfp9P6XNkOhXuYtZHzoAx3tirbi7/dj2WHlGM+NGE1lefceqGR0ZrYsYaqsNJhIJFTgwUpzVzA+mjw==",
"dependencies": {
"@floating-ui/react": "^0.26.2",
"classnames": "^2.2.6",
@@ -8503,9 +9745,9 @@
}
},
"node_modules/react-hook-form": {
- "version": "7.50.1",
- "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.50.1.tgz",
- "integrity": "sha512-3PCY82oE0WgeOgUtIr3nYNNtNvqtJ7BZjsbxh6TnYNbXButaD5WpjOmTjdxZfheuHKR68qfeFnEDVYoSSFPMTQ==",
+ "version": "7.51.0",
+ "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.51.0.tgz",
+ "integrity": "sha512-BggOy5j58RdhdMzzRUHGOYhSz1oeylFAv6jUSG86OvCIvlAvS7KvnRY7yoAf2pfEiPN7BesnR0xx73nEk3qIiw==",
"engines": {
"node": ">=12.22.0"
},
@@ -8552,6 +9794,39 @@
"react": ">=16.8.0"
}
},
+ "node_modules/react-markdown": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-9.0.1.tgz",
+ "integrity": "sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "devlop": "^1.0.0",
+ "hast-util-to-jsx-runtime": "^2.0.0",
+ "html-url-attributes": "^3.0.0",
+ "mdast-util-to-hast": "^13.0.0",
+ "remark-parse": "^11.0.0",
+ "remark-rehype": "^11.0.0",
+ "unified": "^11.0.0",
+ "unist-util-visit": "^5.0.0",
+ "vfile": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ },
+ "peerDependencies": {
+ "@types/react": ">=18",
+ "react": ">=18"
+ }
+ },
+ "node_modules/react-markdown/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
"node_modules/react-modal": {
"version": "3.16.1",
"resolved": "https://registry.npmjs.org/react-modal/-/react-modal-3.16.1.tgz",
@@ -8797,13 +10072,13 @@
}
},
"node_modules/recharts": {
- "version": "2.12.0",
- "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.0.tgz",
- "integrity": "sha512-rVNcdNQ5b7+40Ue7mcEKZJyEv+3SUk2bDEVvOyXPDXXVE7TU3lrvnJUgAvO36hSzhRP2DnAamKXvHLFIFOU0Ww==",
+ "version": "2.12.2",
+ "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.2.tgz",
+ "integrity": "sha512-9bpxjXSF5g81YsKkTSlaX7mM4b6oYI1mIYck6YkUcWuL3tomADccI51/6thY4LmvhYuRTwpfrOvE80Zc3oBRfQ==",
"dependencies": {
"clsx": "^2.0.0",
"eventemitter3": "^4.0.1",
- "lodash": "^4.17.19",
+ "lodash": "^4.17.21",
"react-is": "^16.10.2",
"react-smooth": "^4.0.0",
"recharts-scale": "^0.4.4",
@@ -8974,6 +10249,76 @@
"jsesc": "bin/jsesc"
}
},
+ "node_modules/remark-gfm": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz",
+ "integrity": "sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "mdast-util-gfm": "^3.0.0",
+ "micromark-extension-gfm": "^3.0.0",
+ "remark-parse": "^11.0.0",
+ "remark-stringify": "^11.0.0",
+ "unified": "^11.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/remark-parse": {
+ "version": "11.0.0",
+ "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
+ "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "mdast-util-from-markdown": "^2.0.0",
+ "micromark-util-types": "^2.0.0",
+ "unified": "^11.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/remark-rehype": {
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.0.tgz",
+ "integrity": "sha512-z3tJrAs2kIs1AqIIy6pzHmAHlF1hWQ+OdY4/hv+Wxe35EhyLKcajL33iUEn3ScxtFox9nUvRufR/Zre8Q08H/g==",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "@types/mdast": "^4.0.0",
+ "mdast-util-to-hast": "^13.0.0",
+ "unified": "^11.0.0",
+ "vfile": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/remark-rehype/node_modules/@types/hast": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+ "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+ "dependencies": {
+ "@types/unist": "*"
+ }
+ },
+ "node_modules/remark-stringify": {
+ "version": "11.0.0",
+ "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz",
+ "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==",
+ "dependencies": {
+ "@types/mdast": "^4.0.0",
+ "mdast-util-to-markdown": "^2.0.0",
+ "unified": "^11.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/resolve": {
"version": "1.22.8",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz",
@@ -9205,9 +10550,9 @@
}
},
"node_modules/sass": {
- "version": "1.70.0",
- "resolved": "https://registry.npmjs.org/sass/-/sass-1.70.0.tgz",
- "integrity": "sha512-uUxNQ3zAHeAx5nRFskBnrWzDUJrrvpCPD5FNAoRvTi0WwremlheES3tg+56PaVtCs5QDRX5CBLxxKMDJMEa1WQ==",
+ "version": "1.71.1",
+ "resolved": "https://registry.npmjs.org/sass/-/sass-1.71.1.tgz",
+ "integrity": "sha512-wovtnV2PxzteLlfNzbgm1tFXPLoZILYAMJtvoXXkD7/+1uP41eKkIt1ypWq5/q2uT94qHjXehEYfmjKOvjL9sg==",
"dependencies": {
"chokidar": ">=3.0.0 <4.0.0",
"immutable": "^4.0.0",
@@ -9290,13 +10635,14 @@
}
},
"node_modules/set-function-name": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz",
- "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==",
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz",
+ "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==",
"dependencies": {
- "define-data-property": "^1.0.1",
+ "define-data-property": "^1.1.4",
+ "es-errors": "^1.3.0",
"functions-have-names": "^1.2.3",
- "has-property-descriptors": "^1.0.0"
+ "has-property-descriptors": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -9349,11 +10695,11 @@
}
},
"node_modules/side-channel": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz",
- "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==",
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
+ "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"dependencies": {
- "call-bind": "^1.0.6",
+ "call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
@@ -9488,9 +10834,9 @@
}
},
"node_modules/streamx": {
- "version": "2.15.8",
- "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.15.8.tgz",
- "integrity": "sha512-6pwMeMY/SuISiRsuS8TeIrAzyFbG5gGPHFQsYjUr/pbBadaL1PCWmzKw+CHZSwainfvcF6Si6cVLq4XTEwswFQ==",
+ "version": "2.16.1",
+ "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz",
+ "integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==",
"dependencies": {
"fast-fifo": "^1.1.0",
"queue-tick": "^1.0.1"
@@ -9628,6 +10974,28 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/stringify-entities": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.3.tgz",
+ "integrity": "sha512-BP9nNHMhhfcMbiuQKCqMjhDP5yBCAxsPu4pHFFzJ6Alo9dZgY4VLDPutXqIjpRiMoKdp7Av85Gr73Q5uH9k7+g==",
+ "dependencies": {
+ "character-entities-html4": "^2.0.0",
+ "character-entities-legacy": "^3.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/stringify-entities/node_modules/character-entities-legacy": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
+ "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
@@ -9681,6 +11049,14 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/style-to-object": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.5.tgz",
+ "integrity": "sha512-rDRwHtoDD3UMMrmZ6BzOW0naTjMsVZLIjsGleSKS/0Oz+cgCfAPRspaqJuE8rDzpKha/nEvnM0IF4seEAZUTKQ==",
+ "dependencies": {
+ "inline-style-parser": "0.2.2"
+ }
+ },
"node_modules/styled-components": {
"version": "6.1.8",
"resolved": "https://registry.npmjs.org/styled-components/-/styled-components-6.1.8.tgz",
@@ -9906,9 +11282,9 @@
}
},
"node_modules/swr": {
- "version": "2.2.4",
- "resolved": "https://registry.npmjs.org/swr/-/swr-2.2.4.tgz",
- "integrity": "sha512-njiZ/4RiIhoOlAaLYDqwz5qH/KZXVilRLvomrx83HjzCWTfa+InyfAjv05PSFxnmLzZkNO9ZfvgoqzAaEI4sGQ==",
+ "version": "2.2.5",
+ "resolved": "https://registry.npmjs.org/swr/-/swr-2.2.5.tgz",
+ "integrity": "sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==",
"dependencies": {
"client-only": "^0.0.1",
"use-sync-external-store": "^1.2.0"
@@ -10031,9 +11407,9 @@
}
},
"node_modules/tiny-invariant": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz",
- "integrity": "sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw=="
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz",
+ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg=="
},
"node_modules/titleize": {
"version": "3.0.0",
@@ -10065,6 +11441,24 @@
"node": ">=8.0"
}
},
+ "node_modules/trim-lines": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
+ "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/trough": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz",
+ "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/ts-api-utils": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.2.1.tgz",
@@ -10155,11 +11549,11 @@
}
},
"node_modules/typed-array-buffer": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.1.tgz",
- "integrity": "sha512-RSqu1UEuSlrBhHTWC8O9FnPjOduNs4M7rJ4pRKoEjtx1zUNOPN2sSXHLDX+Y2WPbHIxbvg4JFo2DNAEfPIKWoQ==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz",
+ "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==",
"dependencies": {
- "call-bind": "^1.0.6",
+ "call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"is-typed-array": "^1.1.13"
},
@@ -10168,14 +11562,15 @@
}
},
"node_modules/typed-array-byte-length": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz",
- "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==",
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz",
+ "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==",
"dependencies": {
- "call-bind": "^1.0.2",
+ "call-bind": "^1.0.7",
"for-each": "^0.3.3",
- "has-proto": "^1.0.1",
- "is-typed-array": "^1.1.10"
+ "gopd": "^1.0.1",
+ "has-proto": "^1.0.3",
+ "is-typed-array": "^1.1.13"
},
"engines": {
"node": ">= 0.4"
@@ -10185,15 +11580,16 @@
}
},
"node_modules/typed-array-byte-offset": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz",
- "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz",
+ "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==",
"dependencies": {
- "available-typed-arrays": "^1.0.5",
- "call-bind": "^1.0.2",
+ "available-typed-arrays": "^1.0.7",
+ "call-bind": "^1.0.7",
"for-each": "^0.3.3",
- "has-proto": "^1.0.1",
- "is-typed-array": "^1.1.10"
+ "gopd": "^1.0.1",
+ "has-proto": "^1.0.3",
+ "is-typed-array": "^1.1.13"
},
"engines": {
"node": ">= 0.4"
@@ -10203,13 +11599,19 @@
}
},
"node_modules/typed-array-length": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz",
- "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==",
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.5.tgz",
+ "integrity": "sha512-yMi0PlwuznKHxKmcpoOdeLwxBoVPkqZxd7q2FgMkmD3bNwvF5VW0+UlUQ1k1vmktTu4Yu13Q0RIxEP8+B+wloA==",
"dependencies": {
- "call-bind": "^1.0.2",
+ "call-bind": "^1.0.7",
"for-each": "^0.3.3",
- "is-typed-array": "^1.1.9"
+ "gopd": "^1.0.1",
+ "has-proto": "^1.0.3",
+ "is-typed-array": "^1.1.13",
+ "possible-typed-array-names": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -10277,6 +11679,135 @@
"node": ">=4"
}
},
+ "node_modules/unified": {
+ "version": "11.0.4",
+ "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.4.tgz",
+ "integrity": "sha512-apMPnyLjAX+ty4OrNap7yumyVAMlKx5IWU2wlzzUdYJO9A8f1p9m/gywF/GM2ZDFcjQPrx59Mc90KwmxsoklxQ==",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "bail": "^2.0.0",
+ "devlop": "^1.0.0",
+ "extend": "^3.0.0",
+ "is-plain-obj": "^4.0.0",
+ "trough": "^2.0.0",
+ "vfile": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unified/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/unist-util-is": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+ "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+ "dependencies": {
+ "@types/unist": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-is/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/unist-util-position": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz",
+ "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==",
+ "dependencies": {
+ "@types/unist": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-position/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/unist-util-remove-position": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz",
+ "integrity": "sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-visit": "^5.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-remove-position/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/unist-util-stringify-position": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+ "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+ "dependencies": {
+ "@types/unist": "^3.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-stringify-position/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/unist-util-visit": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+ "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-is": "^6.0.0",
+ "unist-util-visit-parents": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-visit-parents": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+ "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-is": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/unist-util-visit-parents/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/unist-util-visit/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
"node_modules/untildify": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz",
@@ -10356,6 +11887,43 @@
"uuid": "dist/bin/uuid"
}
},
+ "node_modules/vfile": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.1.tgz",
+ "integrity": "sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-stringify-position": "^4.0.0",
+ "vfile-message": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/vfile-message": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz",
+ "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==",
+ "dependencies": {
+ "@types/unist": "^3.0.0",
+ "unist-util-stringify-position": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/vfile-message/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
+ "node_modules/vfile/node_modules/@types/unist": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz",
+ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ=="
+ },
"node_modules/victory-vendor": {
"version": "36.9.1",
"resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-36.9.1.tgz",
@@ -10577,9 +12145,12 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/yaml": {
- "version": "2.3.4",
- "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz",
- "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==",
+ "version": "2.4.0",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.0.tgz",
+ "integrity": "sha512-j9iR8g+/t0lArF4V6NE/QCfT+CO7iLqrXAHZbJdo+LfjqP1vR8Fg5bSiaq6Q2lOD1AUEVrEVIgABvBFYojJVYQ==",
+ "bin": {
+ "yaml": "bin.mjs"
+ },
"engines": {
"node": ">= 14"
}
@@ -10602,6 +12173,15 @@
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
+ },
+ "node_modules/zwitch": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
+ "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
}
}
}
diff --git a/keep-ui/package.json b/keep-ui/package.json
index 9987de62d..50013cdf4 100644
--- a/keep-ui/package.json
+++ b/keep-ui/package.json
@@ -134,7 +134,7 @@
"find-root": "^1.1.0",
"find-up": "^5.0.0",
"flat-cache": "^3.0.4",
- "follow-redirects": "^1.15.4",
+ "follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"fs.realpath": "^1.0.0",
"function-bind": "^1.1.1",
@@ -194,7 +194,7 @@
"isarray": "^2.0.5",
"isexe": "^2.0.0",
"jiti": "^1.18.2",
- "jose": "^4.14.4",
+ "jose": "^4.15.5",
"js-cookie": "^3.0.5",
"js-sdsl": "^4.4.0",
"js-tokens": "^4.0.0",
@@ -265,7 +265,7 @@
"postcss-nested": "^6.0.1",
"postcss-selector-parser": "^6.0.12",
"postcss-value-parser": "^4.2.0",
- "posthog-js": "^1.105.7",
+ "posthog-js": "^1.116.2",
"posthog-node": "^3.1.1",
"preact-render-to-string": "^5.2.6",
"prelude-ls": "^1.2.1",
@@ -283,6 +283,7 @@
"react-icons": "^4.9.0",
"react-loading": "^2.0.3",
"react-loading-skeleton": "^3.3.1",
+ "react-markdown": "^9.0.1",
"react-modal": "^3.16.1",
"react-name-initials-avatar": "^0.0.7",
"react-papaparse": "^4.4.0",
@@ -298,6 +299,7 @@
"regenerator-runtime": "^0.13.11",
"regexp.prototype.flags": "^1.5.0",
"regexpp": "^3.2.0",
+ "remark-gfm": "^4.0.0",
"resolve": "^1.22.2",
"resolve-from": "^4.0.0",
"reusify": "^1.0.4",
@@ -359,10 +361,13 @@
"zod": "^3.22.3"
},
"devDependencies": {
+ "@tailwindcss/typography": "^0.5.10",
"@types/js-cookie": "^3.0.3",
"@types/js-yaml": "^4.0.5",
+ "@types/json-logic-js": "^2.0.7",
+ "@types/json-query": "^2.2.6",
"@types/node": "20.2.1",
- "@types/react-datepicker": "^6.0.1",
+ "@types/react-datepicker": "^6.0.2",
"@types/react-modal": "^3.16.0",
"@types/uuid": "^9.0.1"
},
diff --git a/keep-ui/public/icons/azuremonitoring-icon.png b/keep-ui/public/icons/azuremonitoring-icon.png
new file mode 100644
index 000000000..caa6e0ae9
Binary files /dev/null and b/keep-ui/public/icons/azuremonitoring-icon.png differ
diff --git a/keep-ui/public/icons/gcpmonitoring-icon.png b/keep-ui/public/icons/gcpmonitoring-icon.png
new file mode 100644
index 000000000..be5f255f9
Binary files /dev/null and b/keep-ui/public/icons/gcpmonitoring-icon.png differ
diff --git a/keep-ui/public/icons/signalfx-icon.png b/keep-ui/public/icons/signalfx-icon.png
index 20208eb68..51f50465b 100644
Binary files a/keep-ui/public/icons/signalfx-icon.png and b/keep-ui/public/icons/signalfx-icon.png differ
diff --git a/keep-ui/public/icons/splunk-icon.png b/keep-ui/public/icons/splunk-icon.png
new file mode 100644
index 000000000..a40d29757
Binary files /dev/null and b/keep-ui/public/icons/splunk-icon.png differ
diff --git a/keep-ui/public/keep_loading_new.gif b/keep-ui/public/keep_loading_new.gif
deleted file mode 100644
index 0f983d171..000000000
Binary files a/keep-ui/public/keep_loading_new.gif and /dev/null differ
diff --git a/keep-ui/tailwind.config.js b/keep-ui/tailwind.config.js
index 973357d35..1fc010c80 100644
--- a/keep-ui/tailwind.config.js
+++ b/keep-ui/tailwind.config.js
@@ -129,5 +129,5 @@ module.exports = {
/^(fill-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
},
],
- plugins: [require("@headlessui/tailwindcss")],
+ plugins: [require("@headlessui/tailwindcss"), require('@tailwindcss/typography')],
};
diff --git a/keep-ui/tsconfig.json b/keep-ui/tsconfig.json
index 9a6c3b6b7..581abba93 100644
--- a/keep-ui/tsconfig.json
+++ b/keep-ui/tsconfig.json
@@ -1,7 +1,11 @@
{
"compilerOptions": {
"target": "es6",
- "lib": ["dom", "dom.iterable", "esnext"],
+ "lib": [
+ "dom",
+ "dom.iterable",
+ "esnext"
+ ],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
@@ -21,7 +25,9 @@
],
"baseUrl": ".",
"paths": {
- "@/components/*": ["./components/*"]
+ "@/components/*": [
+ "./components/*"
+ ]
}
},
"include": [
@@ -29,7 +35,10 @@
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts",
- "pages/signin.tsx"
+ "pages/signin.tsx",
+ "./.next/types/**/*.ts"
],
- "exclude": ["node_modules"]
+ "exclude": [
+ "node_modules"
+ ]
}
diff --git a/keep/api/alert_deduplicator/alert_deduplicator.py b/keep/api/alert_deduplicator/alert_deduplicator.py
index 886aad1b2..a16cf20af 100644
--- a/keep/api/alert_deduplicator/alert_deduplicator.py
+++ b/keep/api/alert_deduplicator/alert_deduplicator.py
@@ -5,7 +5,7 @@
import celpy
-from keep.api.core.db import get_alert_by_hash, get_all_filters
+from keep.api.core.db import get_all_filters, get_last_alert_hash_by_fingerprint
from keep.api.models.alert import AlertDto
@@ -35,8 +35,14 @@ def is_deduplicated(self, alert: AlertDto) -> bool:
).hexdigest()
# Check if the hash is already in the database
+ last_alert_hash_by_fingerprint = get_last_alert_hash_by_fingerprint(
+ self.tenant_id, alert.fingerprint
+ )
alert_deduplicate = (
- True if get_alert_by_hash(self.tenant_id, alert_hash) else False
+ True
+ if last_alert_hash_by_fingerprint
+ and last_alert_hash_by_fingerprint == alert_hash
+ else False
)
if alert_deduplicate:
self.logger.info(f"Alert {alert.id} is deduplicated {alert.source}")
diff --git a/keep/api/api.py b/keep/api/api.py
index ac746dd57..90e764bbe 100644
--- a/keep/api/api.py
+++ b/keep/api/api.py
@@ -1,11 +1,8 @@
import logging
import os
-import threading
-import time
from importlib import metadata
import jwt
-import requests
import uvicorn
from dotenv import find_dotenv, load_dotenv
from fastapi import FastAPI, HTTPException, Request, Response
@@ -201,40 +198,32 @@ def signin(body: dict):
"role": user.role,
}
- from fastapi import BackgroundTasks
+ @app.on_event("startup")
+ async def on_startup():
+ # load all providers into cache
+ from keep.providers.providers_factory import ProvidersFactory
- @app.post("/start-services")
- async def start_services(background_tasks: BackgroundTasks):
- logger.info("Starting the internal services")
+ logger.info("Loading providers into cache")
+ ProvidersFactory.get_all_providers()
+ logger.info("Providers loaded successfully")
+ # Start the services
+ logger.info("Starting the services")
+ # Start the scheduler
if SCHEDULER:
logger.info("Starting the scheduler")
wf_manager = WorkflowManager.get_instance()
- background_tasks.add_task(wf_manager.start)
+ await wf_manager.start()
logger.info("Scheduler started successfully")
-
+ # Start the consumer
if CONSUMER:
logger.info("Starting the consumer")
event_subscriber = EventSubscriber.get_instance()
- background_tasks.add_task(event_subscriber.start)
+ # TODO: there is some "race condition" since if the consumer starts before the server,
+ # and start getting events, it will fail since the server is not ready yet
+ # we should add a "wait" here to make sure the server is ready
+ await event_subscriber.start()
logger.info("Consumer started successfully")
-
- return {"status": "Services are starting in the background"}
-
- @app.on_event("startup")
- async def on_startup():
- # load all providers into cache
- from keep.providers.providers_factory import ProvidersFactory
-
- logger.info("Loading providers into cache")
- ProvidersFactory.get_all_providers()
- logger.info("Providers loaded successfully")
-
- # We want to start all internal services (workflowmanager, eventsubscriber, etc) only after the server is up
- # so we init a thread that will wait for the server to be up and then start the internal services
- # start the internal services
- logger.info("Starting the run services thread")
- thread = threading.Thread(target=run_services_after_app_is_up)
- thread.start()
+ logger.info("Services started successfully")
@app.exception_handler(Exception)
async def catch_exception(request: Request, exc: Exception):
@@ -264,46 +253,6 @@ async def log_middeware(request: Request, call_next):
return app
-def run_services_after_app_is_up():
- """Waits until the server is up and than invoking the 'start-services' endpoint to start the internal services"""
- logger.info("Waiting for the server to be ready")
- _wait_for_server_to_be_ready()
- logger.info("Server is ready, starting the internal services")
- # start the internal services
- try:
- # the internal services are always on localhost
- response = requests.post(f"http://localhost:{PORT}/start-services")
- response.raise_for_status()
- logger.info("Internal services started successfully")
- except Exception as e:
- logger.info("Failed to start internal services")
- raise e
-
-
-def _is_server_ready() -> bool:
- # poll localhost to see if the server is up
- try:
- # we are using hardcoded "localhost" to avoid problems where we start Keep on platform such as CloudRun where we have more than one instance
- response = requests.get(f"http://localhost:{PORT}/healthcheck", timeout=1)
- response.raise_for_status()
- return True
- except Exception:
- return False
-
-
-def _wait_for_server_to_be_ready():
- """Wait until the server is up by polling localhost"""
- start_time = time.time()
- while True:
- if _is_server_ready():
- return True
- if time.time() - start_time >= 60:
- raise TimeoutError("Server is not ready after 60 seconds.")
- else:
- logger.warning("Server is not ready yet, retrying in 1 second...")
- time.sleep(1)
-
-
def run(app: FastAPI):
logger.info("Starting the uvicorn server")
# call on starting to create the db and tables
diff --git a/keep/api/bl/enrichments.py b/keep/api/bl/enrichments.py
index 38690ac2a..e805ba5bc 100644
--- a/keep/api/bl/enrichments.py
+++ b/keep/api/bl/enrichments.py
@@ -13,6 +13,11 @@ def get_nested_attribute(obj, attr_path: str):
"""
attributes = attr_path.split(".")
for attr in attributes:
+ # @@ is used as a placeholder for . in cases where the attribute name has a .
+ # For example, we have {"results": {"some.attribute": "value"}}
+        # We can access it by using "results.some@@attribute" so we won't think it's a nested attribute
+ if attr is not None and "@@" in attr:
+ attr = attr.replace("@@", ".")
obj = getattr(obj, attr, obj.get(attr) if isinstance(obj, dict) else None)
if obj is None:
return None
@@ -37,6 +42,7 @@ def run_mapping_rules(self, alert: AlertDto):
self.db_session.query(MappingRule)
.filter(MappingRule.tenant_id == self.tenant_id)
.filter(MappingRule.disabled == False)
+ .order_by(MappingRule.priority.desc())
.all()
)
@@ -59,6 +65,7 @@ def run_mapping_rules(self, alert: AlertDto):
for row in rule.rows:
if all(
get_nested_attribute(alert, attribute) == row.get(attribute)
+ or row.get(attribute) == "*" # Wildcard
for attribute in rule.matchers
):
self.logger.info(
@@ -73,6 +80,12 @@ def run_mapping_rules(self, alert: AlertDto):
for key, value in row.items()
if key not in rule.matchers
}
+
+ # Enrich the alert with the matched row
+ for key, value in enrichments.items():
+ setattr(alert, key, value)
+
+ # Save the enrichments to the database
enrich_alert(
self.tenant_id, alert.fingerprint, enrichments, self.db_session
)
diff --git a/keep/api/config.py b/keep/api/config.py
index 7442dcfdc..5c4d86d9c 100644
--- a/keep/api/config.py
+++ b/keep/api/config.py
@@ -2,6 +2,8 @@
import os
import keep.api.logging
+from keep.api.api import AUTH_TYPE
+from keep.api.core.config import AuthenticationType
from keep.api.core.db import create_db_and_tables, try_create_single_tenant
from keep.api.core.dependencies import SINGLE_TENANT_UUID
@@ -16,7 +18,10 @@ def on_starting(server=None):
logger.info("Keep server starting")
if not os.environ.get("SKIP_DB_CREATION", "false") == "true":
create_db_and_tables()
- try_create_single_tenant(SINGLE_TENANT_UUID)
+
+ # Create single tenant if it doesn't exist
+ if AUTH_TYPE == AuthenticationType.SINGLE_TENANT.value:
+ try_create_single_tenant(SINGLE_TENANT_UUID)
if os.environ.get("USE_NGROK", "false") == "true":
from pyngrok import ngrok
diff --git a/keep/api/core/db.py b/keep/api/core/db.py
index 39d3e28cf..052ae612d 100644
--- a/keep/api/core/db.py
+++ b/keep/api/core/db.py
@@ -2,7 +2,7 @@
import json
import logging
import os
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
from uuid import uuid4
import pymysql
@@ -14,6 +14,7 @@
from sqlalchemy.exc import IntegrityError, OperationalError, ProgrammingError
from sqlalchemy.orm import joinedload, selectinload, subqueryload
from sqlalchemy.orm.attributes import flag_modified
+from sqlalchemy_utils import create_database, database_exists
from sqlmodel import Session, SQLModel, create_engine, select
# This import is required to create the tables
@@ -105,6 +106,7 @@ def __get_conn_impersonate() -> pymysql.connections.Connection:
)
elif db_connection_string:
try:
+ logger.info(f"Creating a connection pool with size {pool_size}")
engine = create_engine(db_connection_string, pool_size=pool_size)
# SQLite does not support pool_size
except TypeError:
@@ -121,13 +123,18 @@ def create_db_and_tables():
"""
Creates the database and tables.
"""
- SQLModel.metadata.create_all(engine)
- # migration add column
-
- # todo: remove this
+ try:
+ if not database_exists(engine.url):
+ logger.info("Creating the database")
+ create_database(engine.url)
+ logger.info("Database created")
+ # On Cloud Run, it fails to check if the database exists
+ except Exception:
+ logger.warning("Failed to create the database or detect if it exists.")
+ pass
- # Execute the ALTER TABLE command
- with engine.connect() as connection:
+ # migrate the workflowtoexecution table
+ with Session(engine) as session:
try:
if engine.dialect.name == "mssql":
connection.execute(
@@ -147,9 +154,40 @@ def create_db_and_tables():
return
logger.exception("Failed to add column alert_hash to alert table")
raise
+
+ logger.info("Migrating WorkflowToAlertExecution table")
+ # get the foreign key constraint name
+ results = session.exec(
+ f"SELECT CONSTRAINT_NAME FROM information_schema.KEY_COLUMN_USAGE WHERE TABLE_SCHEMA = '{engine.url.database}' AND TABLE_NAME = 'workflowtoalertexecution' AND COLUMN_NAME = 'alert_fingerprint';"
+ )
+ # now remove it
+ for row in results:
+ constraint_name = row["CONSTRAINT_NAME"]
+ if constraint_name.startswith("workflowtoalertexecution"):
+ logger.info(f"Dropping constraint {constraint_name}")
+ session.exec(
+ f"ALTER TABLE workflowtoalertexecution DROP FOREIGN KEY {constraint_name};"
+ )
+ logger.info(f"Dropped constraint {constraint_name}")
+ # also add grouping_criteria to the workflow table
+ logger.info("Migrating Rule table")
+ try:
+ session.exec("ALTER TABLE rule ADD COLUMN grouping_criteria JSON;")
+ except Exception as e:
+ # that's ok
+ if "Duplicate column name" in str(e):
+ pass
+ # else, log
+ else:
+ logger.exception("Failed to migrate rule table")
+ pass
+ logger.info("Migrated Rule table")
+ session.commit()
+            logger.info("Migrated successfully")
except Exception:
- logger.exception("Failed to add column alert_hash to alert table")
- raise
+ logger.exception("Failed to migrate table")
+ pass
+ SQLModel.metadata.create_all(engine)
def get_session() -> Session:
@@ -177,18 +215,39 @@ def try_create_single_tenant(tenant_id: str) -> None:
pass
with Session(engine) as session:
try:
- # Do everything related with single tenant creation in here
- session.add(Tenant(id=tenant_id, name="Single Tenant"))
- default_username = os.environ.get("KEEP_DEFAULT_USERNAME", "keep")
- default_password = hashlib.sha256(
- os.environ.get("KEEP_DEFAULT_PASSWORD", "keep").encode()
- ).hexdigest()
- default_user = User(
- username=default_username,
- password_hash=default_password,
- role=AdminRole.get_name(),
- )
- session.add(default_user)
+ # check if the tenant exist:
+ tenant = session.exec(select(Tenant).where(Tenant.id == tenant_id)).first()
+ if not tenant:
+ # Do everything related with single tenant creation in here
+ logger.info("Creating single tenant")
+ session.add(Tenant(id=tenant_id, name="Single Tenant"))
+ else:
+ logger.info("Single tenant already exists")
+
+ # now let's create the default user
+
+ # check if at least one user exists:
+ user = session.exec(select(User)).first()
+ # if no users exist, let's create the default user
+ if not user:
+ default_username = os.environ.get("KEEP_DEFAULT_USERNAME", "keep")
+ default_password = hashlib.sha256(
+ os.environ.get("KEEP_DEFAULT_PASSWORD", "keep").encode()
+ ).hexdigest()
+ default_user = User(
+ username=default_username,
+ password_hash=default_password,
+ role=AdminRole.get_name(),
+ )
+ session.add(default_user)
+ # else, if the user want to force the refresh of the default user password
+ elif os.environ.get("KEEP_FORCE_RESET_DEFAULT_PASSWORD", "false") == "true":
+ # update the password of the default user
+ default_password = hashlib.sha256(
+ os.environ.get("KEEP_DEFAULT_PASSWORD", "keep").encode()
+ ).hexdigest()
+ user.password_hash = default_password
+ # commit the changes
session.commit()
except IntegrityError:
# Tenant already exists
@@ -210,6 +269,21 @@ def try_create_single_tenant(tenant_id: str) -> None:
except Exception:
pass
+ # migrating presets table
+ with Session(engine) as session:
+ try:
+ logger.info("Migrating Preset table")
+ session.exec(
+ "ALTER TABLE preset ADD COLUMN is_private BOOLEAN NOT NULL DEFAULT 0;"
+ )
+ session.exec(
+ "ALTER TABLE preset ADD COLUMN created_by VARCHAR(1024) DEFAULT '';"
+ )
+ session.commit()
+ logger.info("Migrated Preset table")
+ except Exception:
+ pass
+
def create_workflow_execution(
workflow_id: str,
@@ -437,7 +511,13 @@ def get_workflows_with_last_execution(tenant_id: str) -> List[dict]:
WorkflowExecution.workflow_id,
func.max(WorkflowExecution.started).label("last_execution_time"),
)
+ .where(WorkflowExecution.tenant_id == tenant_id)
+ .where(
+ WorkflowExecution.started
+ >= datetime.now(tz=timezone.utc) - timedelta(days=7)
+ )
.group_by(WorkflowExecution.workflow_id)
+ .limit(1000)
.cte("latest_execution_cte")
)
@@ -544,7 +624,10 @@ def finish_workflow_execution(tenant_id, workflow_id, execution_id, status, erro
).first()
workflow_execution.status = status
- workflow_execution.error = error
+ # TODO: we had a bug with the error field, it was too short so some customers may fail over it.
+ # we need to fix it in the future, create a migration that increases the size of the error field
+ # and then we can remove the [:255] from here
+ workflow_execution.error = error[:255] if error else None
workflow_execution.execution_time = (
datetime.utcnow() - workflow_execution.started
).total_seconds()
@@ -733,20 +816,22 @@ def get_alerts_with_filters(tenant_id, provider_id=None, filters=None) -> list[A
if isinstance(filter_value, bool) and filter_value is True:
# If the filter value is True, we want to filter by the existence of the enrichment
# e.g.: all the alerts that have ticket_id
- if session.bind.dialect.name == "mssql":
+ if session.bind.dialect.name in ["mysql", "postgresql", "mssql"]:
query = query.filter(
- func.JSON_VALUE(
+ func.json_extract(
AlertEnrichment.enrichments, f"$.{filter_key}"
)
!= null()
)
- else:
+ elif session.bind.dialect.name == "sqlite":
query = query.filter(
- func.json_type(AlertEnrichment.enrichments, filter_path)
+ func.json_type(
+ AlertEnrichment.enrichments, f"$.{filter_key}"
+ )
!= null()
)
elif isinstance(filter_value, (str, int)):
- if session.bind.dialect.name == "mysql":
+ if session.bind.dialect.name in ["mysql", "postgresql"]:
query = query.filter(
func.json_unquote(
func.json_extract(
@@ -776,6 +861,8 @@ def get_alerts_with_filters(tenant_id, provider_id=None, filters=None) -> list[A
if provider_id:
query = query.filter(Alert.provider_id == provider_id)
+ query = query.order_by(Alert.timestamp.desc())
+
# Execute the query
alerts = query.all()
@@ -1131,9 +1218,17 @@ def assign_alert_to_group(
.where(Alert.fingerprint == fingerprint)
.order_by(Alert.timestamp.desc())
).first()
- group_alert.event["status"] = AlertStatus.RESOLVED.value
- # mark the event as modified so it will be updated in the database
- flag_modified(group_alert, "event")
+ # this is kinda wtf but sometimes we deleted manually
+ # these from the DB since it was too big
+ if not group_alert:
+ logger.warning(
+                f"Group {group.id} is expired, but the alert is not found. Was it deleted manually?"
+ )
+ else:
+ group_alert.event["status"] = AlertStatus.RESOLVED.value
+ # mark the event as modified so it will be updated in the database
+ flag_modified(group_alert, "event")
+ # commit the changes
session.commit()
logger.info(f"Enriched group {group.id} with group_expired flag")
@@ -1194,7 +1289,7 @@ def get_rule_distribution(tenant_id, minute=False):
seven_days_ago = datetime.utcnow() - timedelta(days=1)
# Check the dialect
- if session.bind.dialect.name == "mysql":
+ if session.bind.dialect.name in ["mysql", "postgresql"]:
time_format = "%Y-%m-%d %H:%i" if minute else "%Y-%m-%d %H"
timestamp_format = func.date_format(AlertToGroup.timestamp, time_format)
elif session.bind.dialect.name == "sqlite":
@@ -1281,11 +1376,52 @@ def get_all_filters(tenant_id):
return filters
-def get_alert_by_hash(tenant_id, alert_hash):
+def get_last_alert_hash_by_fingerprint(tenant_id, fingerprint):
+ # get the last alert for a given fingerprint
+ # to check deduplication
with Session(engine) as session:
- alert = session.exec(
- select(Alert)
+ alert_hash = session.exec(
+ select(Alert.alert_hash)
.where(Alert.tenant_id == tenant_id)
- .where(Alert.alert_hash == alert_hash)
+ .where(Alert.fingerprint == fingerprint)
+ .order_by(Alert.timestamp.desc())
).first()
- return alert
+ return alert_hash
+
+
+def update_key_last_used(
+ tenant_id: str,
+ reference_id: str,
+) -> str:
+ """
+ Updates API key last used.
+
+ Args:
+ session (Session): _description_
+ tenant_id (str): _description_
+ reference_id (str): _description_
+
+ Returns:
+ str: _description_
+ """
+ with Session(engine) as session:
+ # Get API Key from database
+ statement = (
+ select(TenantApiKey)
+ .where(TenantApiKey.reference_id == reference_id)
+ .where(TenantApiKey.tenant_id == tenant_id)
+ )
+
+ tenant_api_key_entry = session.exec(statement).first()
+
+ # Update last used
+ if not tenant_api_key_entry:
+ # shouldn't happen but somehow happened to specific tenant so logging it
+ logger.error(
+ "API key not found",
+                extra={"tenant_id": tenant_id, "reference_id": reference_id},
+ )
+ return
+ tenant_api_key_entry.last_used = datetime.utcnow()
+ session.add(tenant_api_key_entry)
+ session.commit()
diff --git a/keep/api/core/dependencies.py b/keep/api/core/dependencies.py
index 6ff35a6bd..8b011d1de 100644
--- a/keep/api/core/dependencies.py
+++ b/keep/api/core/dependencies.py
@@ -15,7 +15,12 @@
from sqlmodel import Session
from keep.api.core.config import AuthenticationType
-from keep.api.core.db import get_api_key, get_session, get_user_by_api_key
+from keep.api.core.db import (
+ get_api_key,
+ get_session,
+ get_user_by_api_key,
+ update_key_last_used,
+)
from keep.api.core.rbac import Admin as AdminRole
from keep.api.core.rbac import get_role_by_role_name
@@ -210,6 +215,13 @@ def _verify_api_key(
tenant_api_key = get_api_key(api_key)
if not tenant_api_key:
raise HTTPException(status_code=401, detail="Invalid API Key")
+ # update last used
+ else:
+ logger.debug("Updating API Key last used")
+ update_key_last_used(
+ tenant_api_key.tenant_id, reference_id=tenant_api_key.reference_id
+ )
+ logger.debug("Successfully updated API Key last used")
# validate scopes
role = get_role_by_role_name(tenant_api_key.role)
@@ -294,6 +306,12 @@ def _verify_api_key(
if not tenant_api_key:
raise HTTPException(status_code=401, detail="Invalid API Key")
+ else:
+ logger.debug("Updating API Key last used")
+ update_key_last_used(
+ tenant_api_key.tenant_id, reference_id=tenant_api_key.reference_id
+ )
+ logger.debug("Successfully updated API Key last used")
role = get_role_by_role_name(tenant_api_key.role)
# validate scopes
diff --git a/keep/api/models/alert.py b/keep/api/models/alert.py
index 70bd310e1..1759c2dbf 100644
--- a/keep/api/models/alert.py
+++ b/keep/api/models/alert.py
@@ -1,5 +1,6 @@
import datetime
import hashlib
+import json
import logging
from enum import Enum
from typing import Any, Dict
@@ -73,10 +74,24 @@ class AlertDto(BaseModel):
group: bool = False # Whether the alert is a group alert
note: str | None = None # The note of the alert
+ def __str__(self) -> str:
+ # Convert the model instance to a dictionary
+ model_dict = self.dict()
+ return json.dumps(model_dict, indent=4, default=str)
+
@validator("fingerprint", pre=True, always=True)
def assign_fingerprint_if_none(cls, fingerprint, values):
+ # if it's None, use the name
if fingerprint is None:
- return hashlib.sha256(values.get("name").encode()).hexdigest()
+ fingerprint_payload = values.get("name")
+ # if the alert name is None, then use the entire payload
+ if not fingerprint_payload:
+ logger.warning("Alert has no name, using the entire payload")
+ fingerprint_payload = json.dumps(values)
+ fingerprint = hashlib.sha256(fingerprint_payload.encode()).hexdigest()
+ # take only the first 255 characters
+ else:
+ fingerprint = fingerprint[:255]
return fingerprint
@validator("deleted", pre=True, always=True)
@@ -86,6 +101,12 @@ def validate_deleted(cls, deleted, values):
if isinstance(deleted, list):
return values.get("lastReceived") in deleted
+ @validator("lastReceived", pre=True, always=True)
+ def validate_last_received(cls, last_received, values):
+ if not last_received:
+ last_received = datetime.datetime.now(datetime.timezone.utc).isoformat()
+ return last_received
+
@validator("dismissed", pre=True, always=True)
def validate_dismissed(cls, dismissed, values):
# normzlize dismissed value
@@ -99,7 +120,7 @@ def validate_dismissed(cls, dismissed, values):
# else, validate dismissedUntil
dismiss_until = values.get("dismissUntil")
# if there's no dismissUntil, return just return dismissed
- if not dismiss_until:
+ if not dismiss_until or dismiss_until == "forever":
return dismissed
# if there's dismissUntil, validate it
diff --git a/keep/api/models/db/preset.py b/keep/api/models/db/preset.py
index 75384440c..7491b29dd 100644
--- a/keep/api/models/db/preset.py
+++ b/keep/api/models/db/preset.py
@@ -1,5 +1,5 @@
from uuid import UUID, uuid4
-
+from typing import Optional
from pydantic import BaseModel
from sqlmodel import JSON, Column, Field, SQLModel
@@ -11,6 +11,10 @@ class Preset(SQLModel, table=True):
)
tenant_id: str = Field(foreign_key="tenant.id", index=True, max_length=36)
name: str = Field(unique=True, max_length=256)
+
+ # keeping index=True for better search
+ created_by: Optional[str] = Field(index=True, nullable=False)
+ is_private: Optional[bool] = Field(default=False)
options: list = Field(sa_column=Column(JSON)) # [{"label": "", "value": ""}]
@@ -18,6 +22,8 @@ class PresetDto(BaseModel, extra="ignore"):
id: UUID
name: str
options: list = []
+ created_by: Optional[str] = None
+ is_private: Optional[bool] = Field(default=False)
class PresetOption(BaseModel, extra="ignore"):
diff --git a/keep/api/models/db/workflow.py b/keep/api/models/db/workflow.py
index e80d85189..da432d18e 100644
--- a/keep/api/models/db/workflow.py
+++ b/keep/api/models/db/workflow.py
@@ -1,7 +1,7 @@
from datetime import datetime
from typing import List, Optional
-from sqlalchemy import TEXT, String
+from sqlalchemy import TEXT
from sqlmodel import JSON, Column, Field, Relationship, SQLModel, UniqueConstraint
@@ -34,7 +34,7 @@ class WorkflowExecution(SQLModel, table=True):
status: str = Field(max_length=1024)
execution_number: int
logs: Optional[str]
- error: Optional[str] = Field(sa_column=String(length=10240))
+ error: Optional[str] = Field(max_length=10240)
execution_time: Optional[int]
results: dict = Field(sa_column=Column(JSON), default={})
diff --git a/keep/api/models/webhook.py b/keep/api/models/webhook.py
index 16f660fa2..2afd2988f 100644
--- a/keep/api/models/webhook.py
+++ b/keep/api/models/webhook.py
@@ -10,3 +10,4 @@ class WebhookSettings(BaseModel):
class ProviderWebhookSettings(BaseModel):
webhookDescription: str | None = None
webhookTemplate: str
+ webhookMarkdown: str | None = None
diff --git a/keep/api/routes/alerts.py b/keep/api/routes/alerts.py
index 51da48134..42ee84a72 100644
--- a/keep/api/routes/alerts.py
+++ b/keep/api/routes/alerts.py
@@ -31,7 +31,6 @@
from keep.api.models.db.alert import Alert, AlertRaw
from keep.api.utils.email_utils import EmailTemplates, send_email
from keep.api.utils.enrichment_helpers import parse_and_enrich_deleted_and_assignees
-from keep.api.utils.tenant_utils import update_key_last_used
from keep.contextmanager.contextmanager import ContextManager
from keep.providers.providers_factory import ProvidersFactory
from keep.rulesengine.rulesengine import RulesEngine
@@ -204,7 +203,7 @@ def pull_alerts_from_providers(
},
)
except Exception as e:
- logger.warn(
+ logger.warning(
f"Could not fetch alerts from provider due to {e}",
extra={
"provider_id": provider.id,
@@ -288,7 +287,7 @@ def get_alert_history(
).get(fingerprint, [])
enriched_alerts_dto.extend(pulled_alerts_history)
except Exception:
- logger.warn(
+ logger.warning(
"Failed to pull alerts history from installed provider",
extra={
"provider_id": provider_id,
@@ -659,13 +658,6 @@ async def receive_generic_event(
pusher_client,
)
- if authenticated_entity.api_key_name:
- logger.debug("Updating API Key last used")
- update_key_last_used(
- session, tenant_id, unique_api_key_id=authenticated_entity.api_key_name
- )
- logger.debug("Successfully updated API Key last used")
-
return alert
@@ -719,9 +711,17 @@ async def receive_event(
"tenant_id": tenant_id,
},
)
- # tb: if we want to have fingerprint_fields configured by the user, format_alert
- # needs to be called from an initalized provider instance instead of a static method.
- formatted_events = provider_class.format_alert(event)
+
+ # if we have provider id, let's try to init the provider class with it
+ provider_instance = None
+ if provider_id:
+ try:
+ provider_instance = ProvidersFactory.get_installed_provider(
+ tenant_id, provider_id, provider_type
+ )
+ except Exception as e:
+ logger.warning(f"Failed to get provider instance due to {str(e)}")
+ formatted_events = provider_class.format_alert(event, provider_instance)
if isinstance(formatted_events, AlertDto):
formatted_events = [formatted_events]
diff --git a/keep/api/routes/preset.py b/keep/api/routes/preset.py
index b3e448d5d..1b899b789 100644
--- a/keep/api/routes/preset.py
+++ b/keep/api/routes/preset.py
@@ -2,7 +2,8 @@
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
-from sqlmodel import Session, select
+from sqlmodel import Session, select, or_
+
from keep.api.core.db import get_session
from keep.api.core.dependencies import AuthenticatedEntity, AuthVerifier
@@ -22,7 +23,19 @@ def get_presets(
) -> list[PresetDto]:
tenant_id = authenticated_entity.tenant_id
logger.info("Getting all presets")
- statement = select(Preset).where(Preset.tenant_id == tenant_id)
+
+ # both global and private presets
+ statement = (
+ select(Preset)
+ .where(Preset.tenant_id == tenant_id)
+ .where(
+ or_(
+ Preset.is_private == False,
+ Preset.created_by == authenticated_entity.email,
+ )
+ )
+ )
+
presets = session.exec(statement).all()
logger.info("Got all presets")
return [PresetDto(**preset.dict()) for preset in presets]
@@ -31,6 +44,7 @@ def get_presets(
class CreateOrUpdatePresetDto(BaseModel):
name: str | None
options: list[PresetOption]
+ is_private: bool = False # if true visible to all users of that tenant
@router.post("", description="Create a preset for tenant")
@@ -40,13 +54,22 @@ def create_preset(
session: Session = Depends(get_session),
) -> PresetDto:
tenant_id = authenticated_entity.tenant_id
- logger.info("Creating preset")
if not body.options or not body.name:
raise HTTPException(400, "Options and name are required")
if body.name == "Feed" or body.name == "Deleted":
raise HTTPException(400, "Cannot create preset with this name")
options_dict = [option.dict() for option in body.options]
- preset = Preset(tenant_id=tenant_id, options=options_dict, name=body.name)
+
+ created_by = authenticated_entity.email
+
+ preset = Preset(
+ tenant_id=tenant_id,
+ options=options_dict,
+ name=body.name,
+ created_by=created_by,
+ is_private=body.is_private,
+ )
+
session.add(preset)
session.commit()
session.refresh(preset)
@@ -100,6 +123,7 @@ def update_preset(
raise HTTPException(400, "Cannot create preset with this name")
if body.name != preset.name:
preset.name = body.name
+ preset.is_private = body.is_private
options_dict = [option.dict() for option in body.options]
if not options_dict:
raise HTTPException(400, "Options cannot be empty")
diff --git a/keep/api/routes/providers.py b/keep/api/routes/providers.py
index 144db59e6..a1fbf66a0 100644
--- a/keep/api/routes/providers.py
+++ b/keep/api/routes/providers.py
@@ -5,9 +5,7 @@
from typing import Callable, Optional
import sqlalchemy
-import yaml
from fastapi import APIRouter, Body, Depends, HTTPException, Request
-from fastapi import UploadFile as fastapiuploadfile
from fastapi.responses import JSONResponse
from sqlalchemy.exc import IntegrityError
from sqlmodel import Session, select
@@ -418,7 +416,6 @@ def validate_provider_scopes(
async def update_provider(
provider_id: str,
request: Request,
- file: fastapiuploadfile = None,
authenticated_entity: AuthenticatedEntity = Depends(
AuthVerifier(["update:providers"])
),
@@ -432,7 +429,12 @@ async def update_provider(
"provider_id": provider_id,
},
)
- provider_info = await __get_provider_raw_data(request, file)
+ try:
+ provider_info = await request.json()
+ except Exception:
+ # If error occurs (likely not JSON), try to get as form data
+ form_data = await request.form()
+ provider_info = dict(form_data)
if not provider_info:
raise HTTPException(status_code=400, detail="No valid data provided")
@@ -478,22 +480,9 @@ async def update_provider(
}
-async def __get_provider_raw_data(request: Request, file: fastapiuploadfile) -> dict:
- try:
- if file:
- provider_raw_data = await file.read()
- else:
- provider_raw_data = await request.body()
- provider_data = yaml.safe_load(provider_raw_data)
- except yaml.YAMLError:
- raise HTTPException(status_code=400, detail="Invalid YAML format")
- return provider_data
-
-
@router.post("/install")
async def install_provider(
request: Request,
- file: fastapiuploadfile = None,
authenticated_entity: AuthenticatedEntity = Depends(
AuthVerifier(["write:providers"])
),
@@ -501,7 +490,12 @@ async def install_provider(
):
tenant_id = authenticated_entity.tenant_id
installed_by = authenticated_entity.email
- provider_info = await __get_provider_raw_data(request, file)
+ try:
+ provider_info = await request.json()
+ except Exception:
+ # If error occurs (likely not JSON), try to get as form data
+ form_data = await request.form()
+ provider_info = dict(form_data)
if not provider_info:
raise HTTPException(status_code=400, detail="No valid data provided")
@@ -634,6 +628,8 @@ async def install_provider_oauth2(
context_manager, provider_unique_id, provider_type, provider_config
)
+ validated_scopes = validate_scopes(provider)
+
secret_manager = SecretManagerFactory.get_secret_manager(context_manager)
secret_name = f"{tenant_id}_{provider_type}_{provider_unique_id}"
secret_manager.write_secret(
@@ -649,6 +645,7 @@ async def install_provider_oauth2(
installed_by=installed_by,
installation_time=time.time(),
configuration_key=secret_name,
+ validatedScopes=validated_scopes,
)
session.add(provider)
session.commit()
@@ -788,6 +785,15 @@ def get_webhook_settings(
"https://", f"https://keep:{webhook_api_key}@"
)
+ try:
+ webhookMarkdown = provider_class.webhook_markdown.format(
+ keep_webhook_api_url=keep_webhook_api_url,
+ api_key=webhook_api_key,
+ keep_webhook_api_url_with_auth=keep_webhook_api_url_with_auth,
+ )
+ except AttributeError:
+ webhookMarkdown = None
+
logger.info("Got webhook settings", extra={"provider_type": provider_type})
return ProviderWebhookSettings(
webhookDescription=provider_class.webhook_description.format(
@@ -800,4 +806,5 @@ def get_webhook_settings(
api_key=webhook_api_key,
keep_webhook_api_url_with_auth=keep_webhook_api_url_with_auth,
),
+ webhookMarkdown=webhookMarkdown,
)
diff --git a/keep/api/routes/settings.py b/keep/api/routes/settings.py
index 9bf709ce7..c448ea8cc 100644
--- a/keep/api/routes/settings.py
+++ b/keep/api/routes/settings.py
@@ -245,7 +245,7 @@ async def update_smtp_settings(
smtp_settings = smtp_settings.dict()
smtp_settings["password"] = smtp_settings["password"].get_secret_value()
secret_manager.write_secret(
- secret_name="smtp", secret_value=json.dumps(smtp_settings)
+ secret_name=f"{tenant_id}_smtp", secret_value=json.dumps(smtp_settings)
)
return {"status": "SMTP settings updated successfully"}
@@ -263,7 +263,7 @@ async def get_smtp_settings(
secret_manager = SecretManagerFactory.get_secret_manager(context_manager)
# Read the SMTP settings from the secret manager
try:
- smtp_settings = secret_manager.read_secret(secret_name="smtp")
+ smtp_settings = secret_manager.read_secret(secret_name=f"{tenant_id}_smtp")
smtp_settings = json.loads(smtp_settings)
logger.info("SMTP settings retrieved successfully")
return JSONResponse(status_code=200, content=smtp_settings)
@@ -284,7 +284,7 @@ async def delete_smtp_settings(
context_manager = ContextManager(tenant_id=tenant_id)
secret_manager = SecretManagerFactory.get_secret_manager(context_manager)
# Read the SMTP settings from the secret manager
- secret_manager.delete_secret(secret_name="smtp")
+ secret_manager.delete_secret(secret_name=f"{tenant_id}_smtp")
logger.info("SMTP settings deleted successfully")
return JSONResponse(status_code=200, content={})
diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py
index a7c115cc3..ef00eda5d 100644
--- a/keep/api/routes/workflows.py
+++ b/keep/api/routes/workflows.py
@@ -1,3 +1,4 @@
+import datetime
import logging
from typing import Any, Dict, List, Optional
@@ -25,6 +26,7 @@
from keep.api.core.db import get_workflow_executions as get_workflow_executions_db
from keep.api.core.db import get_workflow_id_by_name
from keep.api.core.dependencies import AuthenticatedEntity, AuthVerifier
+from keep.api.models.alert import AlertDto
from keep.api.models.workflow import (
ProviderDTO,
WorkflowCreateOrUpdateDTO,
@@ -181,8 +183,23 @@ def run_workflow(
# Finally, run it
try:
+ # if it's an event that was triggered by the UI with the Modal
+ if "test-workflow" in body.get("fingerprint", "") or not body:
+ # fill required alert fields with placeholder values
+ body["id"] = body.get("fingerprint", "manual-run")
+ body["name"] = body.get("fingerprint", "manual-run")
+ body["lastReceived"] = datetime.datetime.now(
+ tz=datetime.timezone.utc
+ ).isoformat()
+ try:
+ alert = AlertDto(**body)
+ except TypeError:
+ raise HTTPException(
+ status_code=400,
+ detail="Invalid alert format",
+ )
workflow_execution_id = workflowmanager.scheduler.handle_manual_event_workflow(
- workflow_id, tenant_id, created_by, body
+ workflow_id, tenant_id, created_by, alert
)
except Exception as e:
logger.exception(
@@ -222,6 +239,7 @@ async def __get_workflow_raw_data(request: Request, file: UploadFile) -> dict:
workflow_data = workflow_data.pop("workflow")
except yaml.YAMLError:
+ logger.exception("Invalid YAML format")
raise HTTPException(status_code=400, detail="Invalid YAML format")
return workflow_data
@@ -298,6 +316,7 @@ async def update_workflow_by_id(
workflow_from_db.description = workflow.get("description")
workflow_from_db.interval = workflow_interval
workflow_from_db.workflow_raw = yaml.dump(workflow)
+ workflow_from_db.last_updated = datetime.datetime.now()
session.add(workflow_from_db)
session.commit()
session.refresh(workflow_from_db)
diff --git a/keep/api/utils/tenant_utils.py b/keep/api/utils/tenant_utils.py
index 7430fe956..035c4775c 100644
--- a/keep/api/utils/tenant_utils.py
+++ b/keep/api/utils/tenant_utils.py
@@ -1,6 +1,5 @@
import hashlib
import logging
-from datetime import datetime
from typing import Optional
from uuid import uuid4
@@ -41,37 +40,6 @@ def get_api_key(
return api_key
-def update_key_last_used(
- session: Session,
- tenant_id: str,
- unique_api_key_id: str,
-) -> str:
- """
- Updates API key last used.
-
- Args:
- session (Session): _description_
- tenant_id (str): _description_
- unique_api_key_id (str): _description_
-
- Returns:
- str: _description_
- """
-
- # Get API Key from database
- statement = (
- select(TenantApiKey)
- .where(TenantApiKey.reference_id == unique_api_key_id)
- .where(TenantApiKey.tenant_id == tenant_id)
- )
-
- tenant_api_key_entry = session.exec(statement).first()
-
- # Update last used
- tenant_api_key_entry.last_used = datetime.utcnow()
- session.commit()
-
-
def update_api_key_internal(
session: Session,
tenant_id: str,
diff --git a/keep/cli/cli.py b/keep/cli/cli.py
index 99ed338c0..7faa8c7aa 100644
--- a/keep/cli/cli.py
+++ b/keep/cli/cli.py
@@ -18,10 +18,12 @@
from keep.api.core.dependencies import SINGLE_TENANT_UUID
from keep.cli.click_extensions import NotRequiredIf
from keep.posthog.posthog import get_posthog_client
+from keep.providers.providers_factory import ProvidersFactory
from keep.workflowmanager.workflowmanager import WorkflowManager
from keep.workflowmanager.workflowstore import WorkflowStore
load_dotenv(find_dotenv())
+
posthog_client = get_posthog_client()
try:
KEEP_VERSION = metadata.version("keep")
@@ -31,7 +33,6 @@
except metadata.PackageNotFoundError:
KEEP_VERSION = os.environ.get("KEEP_VERSION", "unknown")
-
logging_config = {
"version": 1,
"disable_existing_loggers": False,
@@ -114,9 +115,9 @@ def set_config(self, keep_config: str):
pass
self.api_key = self.config.get("api_key") or os.getenv("KEEP_API_KEY") or ""
self.keep_api_url = (
- self.config.get("keep_api_url")
- or os.getenv("KEEP_API_URL")
- or Info.KEEP_MANAGED_API_URL
+ self.config.get("keep_api_url")
+ or os.getenv("KEEP_API_URL")
+ or Info.KEEP_MANAGED_API_URL
)
self.random_user_id = self.config.get("random_user_id")
# if we don't have a random user id, we create one and keep it on the config file
@@ -130,10 +131,10 @@ def set_config(self, keep_config: str):
# if we auth, we don't need to check for api key
if (
- "auth" in arguments
- or "api" in arguments
- or "config" in arguments
- or "version" in arguments
+ "auth" in arguments
+ or "api" in arguments
+ or "config" in arguments
+ or "version" in arguments
):
return
@@ -218,14 +219,39 @@ def version():
click.echo(click.style(KEEP_VERSION, bold=True))
-@cli.command()
+@cli.group()
@pass_info
def config(info: Info):
- """Get the config."""
- keep_url = click.prompt("Enter your keep url", default="http://localhost:8080")
- api_key = click.prompt(
- "Enter your api key (leave blank for localhost)", hide_input=True, default=""
- )
+ """Manage the config."""
+ pass
+
+
+@config.command(name="show")
+@pass_info
+def show(info: Info):
+ """show the current config."""
+ click.echo(click.style("Current config", bold=True))
+ for key, value in info.config.items():
+ click.echo(f"{key}: {value}")
+
+
+@config.command(name="new")
+@click.option("--url", "-u", type=str, required=False, is_flag=False, flag_value="http://localhost:8080", help="The url of the keep api")
+@click.option("--api-key", "-a", type=str, required=False, is_flag=False, flag_value="", help="The api key for keep")
+@click.option("--interactive", "-i", help="Interactive mode creating keep config (default True)", is_flag=True)
+@pass_info
+def new_config(info: Info, url: str, api_key: str, interactive: bool):
+ """create new config."""
+ ctx = click.get_current_context()
+
+ if not interactive:
+ keep_url = ctx.params.get("url")
+ api_key = ctx.params.get("api_key")
+ else:
+ keep_url = click.prompt("Enter your keep url", default="http://localhost:8080")
+ api_key = click.prompt(
+ "Enter your api key (leave blank for localhost)", hide_input=True, default=""
+ )
if not api_key:
api_key = "localhost"
with open(f"{get_default_conf_file_path()}", "w") as f:
@@ -263,11 +289,19 @@ def whoami(info: Info):
@cli.command()
@click.option("--multi-tenant", is_flag=True, help="Enable multi-tenant mode")
-def api(multi_tenant: bool):
+@click.option("--port", "-p", type=int, default=8080, help="The port to run the API on")
+@click.option(
+ "--host", "-h", type=str, default="0.0.0.0", help="The host to run the API on"
+)
+def api(multi_tenant: bool, port: int, host: str):
"""Start the API."""
from keep.api import api
ctx = click.get_current_context()
+
+ api.PORT = ctx.params.get("port")
+ api.HOST = ctx.params.get("host")
+
if multi_tenant:
auth_type = "MULTI_TENANT"
else:
@@ -328,14 +362,14 @@ def api(multi_tenant: bool):
)
@pass_info
def run(
- info: Info,
- alerts_directory: str,
- alert_url: list[str],
- interval: int,
- providers_file,
- tenant_id,
- api_key,
- api_url,
+ info: Info,
+ alerts_directory: str,
+ alert_url: list[str],
+ interval: int,
+ providers_file,
+ tenant_id,
+ api_key,
+ api_url,
):
"""Run a workflow."""
logger.debug(f"Running alert in {alerts_directory or alert_url}")
@@ -414,6 +448,10 @@ def list_workflows(info: Info):
raise Exception(f"Error getting workflows: {resp.text}")
workflows = resp.json()
+ if len(workflows) == 0:
+ click.echo(click.style("No workflows found.", bold=True))
+ return
+
# Create a new table
table = PrettyTable()
# Add column headers
@@ -575,6 +613,10 @@ def list_workflow_executions(info: Info):
raise Exception(f"Error getting workflow executions: {resp.text}")
workflow_executions = resp.json()
+ if len(workflow_executions) == 0:
+ click.echo(click.style("No workflow executions found.", bold=True))
+ return
+
# Create a new table
table = PrettyTable()
# Add column headers
@@ -627,6 +669,10 @@ def get_workflow_execution_logs(info: Info, workflow_execution_id: str):
workflow_executions = resp.json()
workflow_execution_logs = workflow_executions[0].get("logs", [])
+ if len(workflow_execution_logs) == 0:
+ click.echo(click.style("No logs found for this workflow execution.", bold=True))
+ return
+
# Create a new table
table = PrettyTable()
# Add column headers
@@ -641,12 +687,14 @@ def get_workflow_execution_logs(info: Info, workflow_execution_id: str):
table.add_row([log["id"], log["timestamp"], log["message"]])
print(table)
+
@cli.group()
@pass_info
def mappings(info: Info):
"""Manage mappings."""
pass
+
@mappings.command(name="list")
@pass_info
def list_mappings(info: Info):
@@ -660,6 +708,9 @@ def list_mappings(info: Info):
raise Exception(f"Error getting mappings: {resp.text}")
mappings = resp.json()
+ if len(mappings) == 0:
+ click.echo(click.style("No mappings found.", bold=True))
+ return
# Create a new table
table = PrettyTable()
@@ -692,19 +743,21 @@ def list_mappings(info: Info):
]
)
print(table)
-@mappings.command()
+
+
+@mappings.command(name="create")
@click.option(
"--name",
"-n",
type=str,
- help="The name of the mapping",
+ help="The name of the mapping.",
required=True,
)
@click.option(
"--description",
"-d",
type=str,
- help="The description of the mapping",
+ help="The description of the mapping.",
required=False,
default="",
)
@@ -712,18 +765,28 @@ def list_mappings(info: Info):
"--file",
"-f",
type=click.Path(exists=True),
- help="The mapping file",
+ help="The mapping file. Must be a CSV file.",
required=True,
)
@click.option(
"--matchers",
"-m",
type=str,
- help="The matchers of the mapping, as a comma-separated list of strings",
+ help="The matchers of the mapping, as a comma-separated list of strings.",
required=True,
)
+@click.option(
+ "--priority",
+ "-p",
+ type=click.IntRange(0, 100),
+ help="The priority of the mapping, higher priority means this rule will execute first.",
+ required=False,
+ default=0,
+)
@pass_info
-def create(info: Info, name: str, description: str, file: str, matchers: str):
+def create(
+ info: Info, name: str, description: str, file: str, matchers: str, priority: int
+):
"""Create a mapping rule."""
if os.path.isfile(file) and file.endswith(".csv"):
with open(file, "rb") as f:
@@ -752,28 +815,34 @@ def create(info: Info, name: str, description: str, file: str, matchers: str):
"file_name": file_name,
"matchers": matchers.split(","),
"rows": rows,
- }
+ "priority": priority,
+ },
)
# Check the response
if response.ok:
- click.echo(click.style(f"Mapping rule {file_name} created successfully", bold=True))
+ click.echo(
+ click.style(f"Mapping rule {file_name} created successfully", bold=True)
+ )
else:
click.echo(
click.style(
- f"Error creating mapping rule {file_name}: {response.text}", bold=True
+ f"Error creating mapping rule {file_name}: {response.text}",
+ bold=True,
)
)
+
+
@mappings.command(name="delete")
@click.option(
"--mapping-id",
type=int,
- help="The ID of the mapping to delete",
+ help="The ID of the mapping to delete.",
required=True,
)
@pass_info
def delete_mapping(info: Info, mapping_id: int):
- """Delete a mapping with a specified ID"""
+ """Delete a mapping with a specified ID."""
# Delete the mapping with the specified ID
mappings_endpoint = info.keep_api_url + f"/mapping/{mapping_id}"
@@ -785,7 +854,9 @@ def delete_mapping(info: Info, mapping_id: int):
# Check the response
if response.ok:
response = response.json()
- click.echo(click.style(f"Mapping rule {mapping_id} deleted successfully", bold=True))
+ click.echo(
+ click.style(f"Mapping rule {mapping_id} deleted successfully", bold=True)
+ )
else:
click.echo(
click.style(
@@ -793,6 +864,7 @@ def delete_mapping(info: Info, mapping_id: int):
)
)
+
@cli.group()
@pass_info
def provider(info: Info):
@@ -907,10 +979,11 @@ def connect(ctx, help: bool, provider_name, provider_type, params):
]
provider_type = provider.get("type")
for param, details in provider["config"].items():
+ param_as_flag = f"--{param.replace('_', '-')}"
table.add_row(
[
provider_type,
- param,
+ param_as_flag,
details.get("required", False),
details.get("description", "no description"),
]
@@ -939,7 +1012,7 @@ def connect(ctx, help: bool, provider_name, provider_type, params):
for config in provider["config"]:
config_as_flag = f"--{config.replace('_', '-')}"
if config_as_flag not in options_dict and provider["config"][config].get(
- "required", True
+ "required", True
):
raise click.BadOptionUsage(
config_as_flag,
@@ -1035,6 +1108,7 @@ def alert(info: Info):
)
@pass_info
def get_alert(info: Info, fingerprint: str):
+ """Get an alert by fingerprint."""
resp = _get_alert_by_fingerprint(info.keep_api_url, info.api_key, fingerprint)
if not resp.ok:
raise Exception(f"Error getting alert: {resp.text}")
@@ -1074,6 +1148,11 @@ def list_alerts(info: Info, filter: typing.List[str], export: bool):
aggregated_alerts[alert["fingerprint"]] = alert
alerts = aggregated_alerts.values()
+
+ if len(alerts) == 0:
+ click.echo(click.style("No alerts found.", bold=True))
+ return
+
# Apply all provided filters
for filt in filter:
key, value = filt.split("=")
@@ -1169,6 +1248,55 @@ def enrich(info: Info, fingerprint, params):
click.echo(click.style(f"Alert {fingerprint} enriched successfully", bold=True))
+@alert.command()
+@click.option(
+ "--provider-type",
+ "-p",
+ type=click.Path(exists=False),
+ help="The type of the provider which will be used to simulate the alert.",
+ required=True,
+)
+@click.argument("params", nargs=-1, type=click.UNPROCESSED)
+@pass_info
+def simulate(info: Info, provider_type: str, params: list[str]):
+ """Simulate an alert."""
+ click.echo(click.style("Simulating alert", bold=True))
+ try:
+ provider = ProvidersFactory.get_provider_class(provider_type)
+ except Exception as e:
+ click.echo(click.style(f"No such provider: {e}", bold=True))
+ return
+
+ try:
+ alert = provider.simulate_alert()
+ except Exception:
+ click.echo(click.style("Provider does not support alert simulation", bold=True))
+ return
+ # override the alert with the provided params
+ for param in params:
+ key, value = param.split("=")
+ # if the param contains "."
+ if "." in key:
+ # split the key by "." and set the value in the alert
+ keys = key.split(".")
+ alert[keys[0]][keys[1]] = value
+ else:
+ alert[key] = value
+ click.echo("Simulated alert:")
+ click.echo(json.dumps(alert, indent=4))
+ # send the alert to the server
+ resp = make_keep_request(
+ "POST",
+ info.keep_api_url + f"/alerts/event/{provider_type}",
+ headers={"x-api-key": info.api_key, "accept": "application/json"},
+ json=alert,
+ )
+ if not resp.ok:
+ click.echo(click.style(f"Error simulating alert: {resp.text}", bold=True))
+ else:
+ click.echo(click.style("Alert simulated successfully", bold=True))
+
+
@cli.group()
@pass_info
def auth(info: Info):
diff --git a/keep/functions/__init__.py b/keep/functions/__init__.py
index 2e994a946..6a1760947 100644
--- a/keep/functions/__init__.py
+++ b/keep/functions/__init__.py
@@ -15,37 +15,38 @@ def all(iterable) -> bool:
g = groupby(iterable)
return next(g, True) and not next(g, False)
-
def diff(iterable: iter) -> bool:
# Opposite of all - returns True if any element is different
return not all(iterable)
-
def len(iterable=[]) -> int:
return _len(iterable)
+def uppercase(string) -> str:
+ return string.upper()
+
+def lowercase(string) -> str:
+ return string.lower()
def split(string, delimeter) -> list:
return string.strip().split(delimeter)
-
def strip(string) -> str:
return string.strip()
-
def first(iterable):
return iterable[0]
+def last(iterable):
+ return iterable[-1]
def utcnow() -> datetime.datetime:
dt = datetime.datetime.now(datetime.timezone.utc)
return dt
-
def utcnowiso() -> str:
return utcnow().isoformat()
-
def substract_minutes(dt: datetime.datetime, minutes: int) -> datetime.datetime:
"""
Substract minutes from a datetime object
@@ -59,28 +60,23 @@ def substract_minutes(dt: datetime.datetime, minutes: int) -> datetime.datetime:
"""
return dt - datetime.timedelta(minutes=minutes)
-
def to_utc(dt: datetime.datetime | str) -> datetime.datetime:
if isinstance(dt, str):
dt = parser.parse(dt)
utc_dt = dt.astimezone(pytz.utc)
return utc_dt
-
def datetime_compare(t1, t2) -> float:
diff = (t1 - t2).total_seconds() / 3600
return diff
-
def json_dumps(data: str | dict) -> str:
if isinstance(data, str):
data = json.loads(data)
return json.dumps(data, indent=4, default=str)
-
def encode(string) -> str:
return urllib.parse.quote(string)
-
def dict_to_key_value_list(d: dict) -> list:
return [f"{k}:{v}" for k, v in d.items()]
diff --git a/keep/iohandler/iohandler.py b/keep/iohandler/iohandler.py
index a83d7bf3e..5f10f9997 100644
--- a/keep/iohandler/iohandler.py
+++ b/keep/iohandler/iohandler.py
@@ -64,6 +64,63 @@ def quote(self, template):
replacement = r"'{{ \1 }}'"
return re.sub(pattern, replacement, template)
+ def extract_keep_functions(self, text):
+ matches = []
+ i = 0
+ while i < len(text):
+ if text[i : i + 5] == "keep.":
+ start = i
+ func_end = text.find("(", start)
+ if func_end > -1: # Opening '(' found after "keep."
+ i = func_end + 1 # Move i to the character after '('
+ paren_count = 1
+ in_string = False
+ escape_next = False
+ quote_char = ""
+
+ while i < len(text) and (paren_count > 0 or in_string):
+ if text[i] == "\\" and in_string and not escape_next:
+ escape_next = True
+ i += 1
+ continue
+ elif text[i] in ('"', "'"):
+ if not in_string:
+ in_string = True
+ quote_char = text[i]
+ elif text[i] == quote_char and not escape_next:
+ in_string = False
+ quote_char = ""
+ elif text[i] == "(" and not in_string:
+ paren_count += 1
+ elif text[i] == ")" and not in_string:
+ paren_count -= 1
+
+ escape_next = False
+ i += 1
+
+ if paren_count == 0:
+ matches.append(text[start:i])
+ continue # Skip the increment at the end of the loop to continue from the current position
+ else:
+ # If no '(' found, increment i to move past "keep."
+ i += 5
+ else:
+ i += 1
+ return matches
+
+ def _trim_token_error(self, token):
+ # trim too long tokens so that the error message will be readable
+ if len(token) > 64:
+ try:
+ func_name = token.split("keep.")[1].split("(")[0]
+ err = f"keep.{func_name}(...)"
+ except Exception:
+ err = token
+ finally:
+ return err
+ else:
+ return token
+
def parse(self, string, safe=False, default=""):
"""Use AST module to parse 'call stack'-like string and return the result
@@ -92,24 +149,34 @@ def parse(self, string, safe=False, default=""):
string = self._render(string, safe, default)
# Now, extract the token if exists -
- pattern = (
- r"\bkeep\.\w+\((?:[^()]*|\((?:[^()]*|\((?:[^()]*|\([^()]*\))*\))*\))*\)"
- )
parsed_string = copy.copy(string)
- matches = re.findall(pattern, parsed_string)
- tokens = list(matches)
-
+ tokens = self.extract_keep_functions(parsed_string)
if len(tokens) == 0:
return parsed_string
elif len(tokens) == 1:
token = "".join(tokens[0])
- val = self._parse_token(token)
+ try:
+ val = self._parse_token(token)
+ except Exception as e:
+ # trim stacktrace since we have limitation on the error message
+ trimmed_token = self._trim_token_error(token)
+ err_message = str(e).splitlines()[-1]
+ raise Exception(
+ f"Got {e.__class__.__name__} while parsing token '{trimmed_token}': {err_message}"
+ )
parsed_string = parsed_string.replace(token, str(val))
return parsed_string
# this basically for complex expressions with functions and operators
for token in tokens:
token = "".join(token)
- val = self._parse_token(token)
+ try:
+ val = self._parse_token(token)
+ except Exception as e:
+ trimmed_token = self._trim_token_error(token)
+ err_message = str(e).splitlines()[-1]
+ raise Exception(
+ f"Got {e.__class__.__name__} while parsing token '{trimmed_token}': {err_message}"
+ )
parsed_string = parsed_string.replace(token, str(val))
return parsed_string
@@ -194,13 +261,24 @@ def _render(self, key, safe=False, default=""):
original_stderr = sys.stderr
sys.stderr = io.StringIO()
rendered = chevron.render(_key, context, warn=True)
+ # chevron.render will escape the quotes, we need to unescape them
+        rendered = rendered.replace("&quot;", '"')
stderr_output = sys.stderr.getvalue()
sys.stderr = original_stderr
# If render should failed if value does not exists
if safe and "Could not find key" in stderr_output:
- raise RenderException(
- f"Could not find key {key} in context - {stderr_output}"
- )
+            # if more than one key is missing, prettify the error
+ if stderr_output.count("Could not find key") > 1:
+ missing_keys = stderr_output.split("Could not find key")
+ missing_keys = [
+ missing_key.strip().replace("\n", "")
+ for missing_key in missing_keys[1:]
+ ]
+ missing_keys = list(set(missing_keys))
+ err = "Could not find keys: " + ", ".join(missing_keys)
+ else:
+ err = stderr_output.replace("\n", "")
+ raise RenderException(f"{err} in the context.")
if not rendered:
return default
return rendered
diff --git a/keep/parser/parser.py b/keep/parser/parser.py
index e272f2ee5..f595e412b 100644
--- a/keep/parser/parser.py
+++ b/keep/parser/parser.py
@@ -98,7 +98,7 @@ def _get_workflow_provider_types_from_steps_and_actions(
if provider_type not in provider_types:
provider_types.append(provider_type)
except Exception:
- self.logger.warn(
+ self.logger.warning(
"Could not get provider type from step or action",
extra={"step_or_action": step_or_action},
)
diff --git a/keep/providers/azuremonitoring_provider/__init__.py b/keep/providers/azuremonitoring_provider/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/keep/providers/azuremonitoring_provider/azuremonitoring_provider.py b/keep/providers/azuremonitoring_provider/azuremonitoring_provider.py
new file mode 100644
index 000000000..fabf8307c
--- /dev/null
+++ b/keep/providers/azuremonitoring_provider/azuremonitoring_provider.py
@@ -0,0 +1,117 @@
+"""
+AzuremonitoringProvider is a class that gets alerts from Azure Monitor into Keep.
+"""
+
+import datetime
+from typing import Optional
+
+from keep.api.models.alert import AlertDto, AlertSeverity, AlertStatus
+from keep.contextmanager.contextmanager import ContextManager
+from keep.providers.base.base_provider import BaseProvider
+from keep.providers.models.provider_config import ProviderConfig
+
+
+class AzuremonitoringProvider(BaseProvider):
+ """Get alerts from Azure Monitor into Keep."""
+
+ webhook_description = ""
+ webhook_template = ""
+ webhook_markdown = """
+💡 For more details on how to configure Azure Monitor to send alerts to Keep, see the [Keep documentation](https://docs.keephq.dev/providers/documentation/azuremonitoring-provider). 💡
+
+To send alerts from Azure Monitor to Keep, use the following webhook URL to configure Azure Monitor to send alerts to Keep:
+
+1. In Azure Monitor, create a new Action Group.
+2. In the Action Group, add a new action of type "Webhook".
+3. In the Webhook action, configure the webhook with the following settings.
+- **Name**: keep-azuremonitoring-webhook-integration
+- **URL**: {keep_webhook_api_url_with_auth}
+4. Save the Action Group.
+5. In the Alert Rule, configure the Action Group to use the Action Group created in step 1.
+6. Save the Alert Rule.
+7. Test the Alert Rule to ensure that the alerts are being sent to Keep.
+"""
+
+ # Maps Azure Monitor severity to Keep's format
+ SEVERITIES_MAP = {
+ "Sev0": AlertSeverity.CRITICAL,
+ "Sev1": AlertSeverity.HIGH,
+ "Sev2": AlertSeverity.WARNING,
+ "Sev3": AlertSeverity.INFO,
+ "Sev4": AlertSeverity.LOW,
+ }
+
+ # Maps Azure Monitor monitor condition to Keep's format
+ STATUS_MAP = {
+ "Resolved": AlertStatus.RESOLVED,
+ "Fired": AlertStatus.FIRING,
+ }
+
+ PROVIDER_DISPLAY_NAME = "Azure Monitor"
+
+ def __init__(
+ self, context_manager: ContextManager, provider_id: str, config: ProviderConfig
+ ):
+ super().__init__(context_manager, provider_id, config)
+
+ def validate_config(self):
+ """
+        Validates required configuration for the Azure Monitor provider.
+ """
+ # no config
+ pass
+
+ @staticmethod
+ def _format_alert(
+ event: dict, provider_instance: Optional["AzuremonitoringProvider"]
+ ) -> AlertDto:
+ essentials = event.get("data", {}).get("essentials", {})
+ alert_context = event.get("data", {}).get("alertContext", {})
+
+ # Extract and format the alert ID
+ alert_id = essentials.get("alertId", "").split("/")[-1]
+
+ # Format the severity
+ severity = AzuremonitoringProvider.SEVERITIES_MAP.get(
+ essentials.get("severity"), AlertSeverity.INFO
+ )
+
+ # Format the status
+ status = AzuremonitoringProvider.STATUS_MAP.get(
+ essentials.get("monitorCondition"), AlertStatus.FIRING
+ )
+
+ # Parse and format the timestamp
+ event_time = essentials.get("firedDateTime", essentials.get("resolvedDateTime"))
+ if event_time:
+ event_time = datetime.datetime.fromisoformat(event_time)
+
+ # Extract other essential fields
+ resource_ids = essentials.get("alertTargetIDs", [])
+ description = essentials.get("description", "")
+ subscription = essentials.get("alertId", "").split("/")[2]
+
+ url = f"https://portal.azure.com/#view/Microsoft_Azure_Monitoring_Alerts/AlertDetails.ReactView/alertId~/%2Fsubscriptions%2F{subscription}%2Fproviders%2FMicrosoft.AlertsManagement%2Falerts%2F{alert_id}"
+ # Construct the alert object
+ alert = AlertDto(
+ id=alert_id,
+ name=essentials.get("alertRule", ""),
+ status=status,
+ lastReceived=str(event_time),
+ source=["azuremonitoring"],
+ description=description,
+ groups=resource_ids,
+ severity=severity,
+ url=url,
+ monitor_id=essentials.get("originAlertId", ""),
+ alertContext=alert_context,
+ essentials=essentials,
+ customProperties=event.get("data", {}).get("customProperties", {}),
+ )
+
+ # Set fingerprint if applicable
+ return alert
+
+
+if __name__ == "__main__":
+ pass
diff --git a/keep/providers/base/base_provider.py b/keep/providers/base/base_provider.py
index cc7f8fd0e..ef5ff9c15 100644
--- a/keep/providers/base/base_provider.py
+++ b/keep/providers/base/base_provider.py
@@ -1,6 +1,7 @@
"""
Base class for all providers.
"""
+
import abc
import copy
import datetime
@@ -43,6 +44,7 @@ def __init__(
config: ProviderConfig,
webhooke_template: Optional[str] = None,
webhook_description: Optional[str] = None,
+ webhook_markdown: Optional[str] = None,
provider_description: Optional[str] = None,
):
"""
@@ -57,6 +59,7 @@ def __init__(
self.config = config
self.webhooke_template = webhooke_template
self.webhook_description = webhook_description
+ self.webhook_markdown = webhook_markdown
self.provider_description = provider_description
self.context_manager = context_manager
self.logger = context_manager.get_logger()
@@ -228,14 +231,31 @@ def query(self, **kwargs: dict):
return results
@staticmethod
- def _format_alert(event: dict) -> AlertDto | list[AlertDto]:
+ def _format_alert(
+ event: dict, provider_instance: Optional["BaseProvider"]
+ ) -> AlertDto | list[AlertDto]:
+ """
+ Format an incoming alert.
+
+ Args:
+ event (dict): The raw provider event payload.
+ provider_instance (Optional["BaseProvider"]): The tenant provider instance if it was successfully loaded.
+
+ Raises:
+            NotImplementedError: For providers that do not implement this method.
+
+ Returns:
+ AlertDto | list[AlertDto]: The formatted alert(s).
+ """
raise NotImplementedError("format_alert() method not implemented")
@classmethod
- def format_alert(cls, event: dict) -> AlertDto | list[AlertDto]:
+ def format_alert(
+ cls, event: dict, provider_instance: Optional["BaseProvider"]
+ ) -> AlertDto | list[AlertDto]:
logger = logging.getLogger(__name__)
logger.debug("Formatting alert")
- formatted_alert = cls._format_alert(event)
+ formatted_alert = cls._format_alert(event, provider_instance)
logger.debug("Alert formatted")
return formatted_alert
@@ -524,15 +544,21 @@ def _push_alert(self, alert: dict):
f"Failed to push alert to {self.provider_id}: {response.content}"
)
- @staticmethod
- def simulate_alert(**kwargs) -> AlertDto:
- """
- Simulate an alert.
+ @classmethod
+ def simulate_alert(cls) -> dict:
+ # can be overridden by the provider
+ import importlib
+ import random
- Args:
- **kwargs (dict): The provider context (with statement)
+ module_path = ".".join(cls.__module__.split(".")[0:-1]) + ".alerts_mock"
+ module = importlib.import_module(module_path)
- Returns:
- AlertDto: The simulated alert.
- """
- raise NotImplementedError("simulate_alert() method not implemented")
+ ALERTS = getattr(module, "ALERTS", None)
+
+ alert_type = random.choice(list(ALERTS.keys()))
+ alert_data = ALERTS[alert_type]
+
+ # Start with the base payload
+ simulated_alert = alert_data["payload"].copy()
+
+ return simulated_alert
diff --git a/keep/providers/cloudwatch_provider/cloudwatch_provider.py b/keep/providers/cloudwatch_provider/cloudwatch_provider.py
index 25355303e..8bd49ba23 100644
--- a/keep/providers/cloudwatch_provider/cloudwatch_provider.py
+++ b/keep/providers/cloudwatch_provider/cloudwatch_provider.py
@@ -9,6 +9,7 @@
import logging
import os
import time
+from typing import Optional
from urllib.parse import urlparse
import boto3
@@ -92,6 +93,13 @@ class CloudwatchProvider(BaseProvider):
mandatory=False,
alias="Read Query results",
),
+ ProviderScope(
+ name="logs:DescribeQueries",
+ description="Part of CloudWatchLogsReadOnlyAccess role. Required to describe the results of CloudWatch Logs Insights queries.",
+ documentation_url="https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_DescribeQueries.html",
+ mandatory=False,
+ alias="Describe Query results",
+ ),
ProviderScope(
name="logs:StartQuery",
description="Part of CloudWatchLogsReadOnlyAccess role. Required to start CloudWatch Logs Insights queries.",
@@ -174,7 +182,17 @@ def validate_scopes(self):
for res in iam_resp.get("EvaluationResults")
}
scopes["iam:SimulatePrincipalPolicy"] = True
- return scopes
+ if all(scopes.values()):
+ self.logger.info(
+ "All AWS IAM scopes are granted!", extra={"scopes": scopes}
+ )
+ return scopes
+ # if not all the scopes are granted, we need to test them one by one
+ else:
+ self.logger.warning(
+ "Some of the AWS IAM scopes are not granted, testing them one by one...",
+ extra={"scopes": scopes},
+ )
# otherwise, we need to test them one by one
except Exception:
self.logger.info("Error validating AWS IAM scopes")
@@ -239,7 +257,7 @@ def validate_scopes(self):
# 4. validate start query
logs_client = self.__generate_client("logs")
try:
- logs_client.start_query(
+ query = logs_client.start_query(
logGroupName="keepTest",
queryString="keepTest",
startTime=int(
@@ -258,16 +276,24 @@ def validate_scopes(self):
else:
self.logger.info("Error validating AWS logs:StartQuery scope")
scopes["logs:StartQuery"] = str(e)
+ if query:
+ try:
+ query_id = logs_client.describe_queries().get("queries")[0]["queryId"]
+ except Exception:
+ self.logger.exception("Error validating AWS logs:DescribeQueries scope")
+            # NOTE(review): subscripting with a tuple would create a single tuple key;
+            msg = "Could not validate logs:GetQueryResults scope without logs:DescribeQueries, so assuming the scope is not granted."
+            scopes["logs:DescribeQueries"] = msg
+            scopes["logs:GetQueryResults"] = msg
+ try:
+ logs_client.get_query_results(queryId=query_id)
+ scopes["logs:StartQuery"] = True
+ scopes["logs:DescribeQueries"] = True
+ except Exception as e:
+ self.logger.exception("Error validating AWS logs:StartQuery scope")
+ scopes["logs:StartQuery"] = str(e)
# 5. validate get query results
- try:
- query_id = logs_client.describe_queries().get("queries")[0]["queryId"]
- except Exception:
- self.logger.exception("Error validating AWS logs:DescribeQueries scope")
- scopes[
- "logs:GetQueryResults"
- ] = "Could not validate logs:GetQueryResults scope without logs:DescribeQueries, so assuming the scope is not granted."
-
if query_id:
try:
logs_client.get_query_results(queryId=query_id)
@@ -275,6 +301,7 @@ def validate_scopes(self):
except Exception as e:
self.logger.exception("Error validating AWS logs:GetQueryResults scope")
scopes["logs:GetQueryResults"] = str(e)
+
# Finally
return scopes
@@ -377,7 +404,7 @@ def setup_webhook(
actions = alarm.get("AlarmActions", [])
# extract only SNS actions
topics = [action for action in actions if action.startswith("arn:aws:sns")]
- # if we got explicitly SNS topic, add is as an action
+ # if we got explicitly SNS topic, add it as an action
if self.authentication_config.cloudwatch_sns_topic:
self.logger.warning(
"Cannot hook alarm without SNS topic, trying to add SNS action..."
@@ -391,22 +418,32 @@ def setup_webhook(
else:
sns_topic = self.authentication_config.cloudwatch_sns_topic
actions.append(sns_topic)
- try:
- alarm["AlarmActions"] = actions
- # filter out irrelevant files
- filtered_alarm = {
- k: v
- for k, v in alarm.items()
- if k in CloudwatchProvider.VALID_ALARM_KEYS
- }
- cloudwatch_client.put_metric_alarm(**filtered_alarm)
- # now it should contain the SNS topic
- topics = [sns_topic]
- except Exception:
- self.logger.exception(
- "Error adding SNS action to alarm %s", alarm.get("AlarmName")
+ # if the alarm already has the SNS topic as action, we don't need to add it again
+ if sns_topic in actions:
+ self.logger.info(
+ "SNS action already added to alarm %s, skipping...",
+ alarm.get("AlarmName"),
)
- continue
+ else:
+ self.logger.info(
+ "Adding SNS action to alarm %s...", alarm.get("AlarmName")
+ )
+ try:
+ alarm["AlarmActions"] = actions
+ # filter out irrelevant files
+ filtered_alarm = {
+ k: v
+ for k, v in alarm.items()
+ if k in CloudwatchProvider.VALID_ALARM_KEYS
+ }
+ cloudwatch_client.put_metric_alarm(**filtered_alarm)
+ # now it should contain the SNS topic
+ topics = [sns_topic]
+ except Exception:
+ self.logger.exception(
+ "Error adding SNS action to alarm %s", alarm.get("AlarmName")
+ )
+ continue
self.logger.info(
"SNS action added to alarm %s!", alarm.get("AlarmName")
)
@@ -456,7 +493,9 @@ def setup_webhook(
self.logger.info("Webhook setup completed!")
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["CloudwatchProvider"]
+ ) -> AlertDto:
logger = logging.getLogger(__name__)
# if its confirmation event, we need to confirm the subscription
if event.get("Type") == "SubscriptionConfirmation":
diff --git a/keep/providers/datadog_provider/datadog_provider.py b/keep/providers/datadog_provider/datadog_provider.py
index 6546fc13d..2f6a59ab7 100644
--- a/keep/providers/datadog_provider/datadog_provider.py
+++ b/keep/providers/datadog_provider/datadog_provider.py
@@ -6,6 +6,7 @@
import json
import os
import time
+from typing import Optional
import pydantic
import requests
@@ -747,7 +748,10 @@ def setup_webhook(
)
self.logger.info("Monitors updated")
- def _format_alert(event: dict) -> AlertDto:
+ @staticmethod
+ def _format_alert(
+ event: dict, provider_instance: Optional["DatadogProvider"]
+ ) -> AlertDto:
tags_list = event.get("tags", "").split(",")
tags_list.remove("monitor")
tags = {k: v for k, v in map(lambda tag: tag.split(":"), tags_list)}
@@ -816,8 +820,8 @@ def get_logs(self, limit: int = 5) -> list:
def get_alert_schema():
return DatadogAlertFormatDescription.schema()
- @staticmethod
- def simulate_alert() -> dict:
+ @classmethod
+ def simulate_alert(cls) -> dict:
# Choose a random alert type
import hashlib
import random
diff --git a/keep/providers/dynatrace_provider/dynatrace_provider.py b/keep/providers/dynatrace_provider/dynatrace_provider.py
index f61093c88..128b04748 100644
--- a/keep/providers/dynatrace_provider/dynatrace_provider.py
+++ b/keep/providers/dynatrace_provider/dynatrace_provider.py
@@ -7,6 +7,7 @@
import json
import logging
import os
+from typing import Optional
import pydantic
import requests
@@ -118,7 +119,7 @@ def _get_alerts(self) -> list[AlertDto]:
raise Exception(f"Failed to get problems from Dynatrace: {response.text}")
else:
return [
- self.format_alert(event)
+ self._format_alert(event)
for event in response.json().get("problems", [])
]
@@ -208,7 +209,9 @@ def validate_scopes(self):
return scopes
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["DynatraceProvider"]
+ ) -> AlertDto:
# alert that comes from webhook
if event.get("ProblemID"):
tags = event.get("Tags", [])
diff --git a/keep/providers/gcpmonitoring_provider/__init__.py b/keep/providers/gcpmonitoring_provider/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/keep/providers/gcpmonitoring_provider/gcpmonitoring_provider.py b/keep/providers/gcpmonitoring_provider/gcpmonitoring_provider.py
new file mode 100644
index 000000000..2b39d3b73
--- /dev/null
+++ b/keep/providers/gcpmonitoring_provider/gcpmonitoring_provider.py
@@ -0,0 +1,119 @@
+"""
+GcpmonitoringProvider is a class that gets alerts from GCP Monitoring into Keep.
+"""
+
+import datetime
+from typing import Optional
+
+from keep.api.models.alert import AlertDto, AlertSeverity, AlertStatus
+from keep.contextmanager.contextmanager import ContextManager
+from keep.providers.base.base_provider import BaseProvider
+from keep.providers.models.provider_config import ProviderConfig
+
+
+class GcpmonitoringProvider(BaseProvider):
+ """Get alerts from Azure Monitor into Keep."""
+
+ webhook_description = ""
+ webhook_template = ""
+ webhook_markdown = """
+💡 For more details on how to configure GCP Monitoring to send alerts to Keep, see the [Keep documentation](https://docs.keephq.dev/providers/documentation/gcpmonitoring-provider). 💡
+
+To send alerts from GCP Monitoring to Keep, use the following webhook URL to configure GCP Monitoring to send alerts to Keep:
+
+1. In GCP Monitoring, go to Notification Channels.
+2. In the Webhooks section click "ADD NEW".
+3. In the Endpoint URL, configure:
+- **Endpoint URL**: {keep_webhook_api_url}
+- **Display Name**: keep-gcpmonitoring-webhook-integration
+4. Click on "Use HTTP Basic Auth"
+- **Auth Username**: api_key
+- **Auth Password**: {api_key}
+5. Click on "Save".
+6. Go to the Alert Policy that you want to send to Keep and click on "Edit".
+7. Go to "Notifications and name"
+8. Click on "Notification Channels" and select the "keep-gcpmonitoring-webhook-integration" that you created in step 3.
+9. Click on "SAVE POLICY".
+"""
+
+ # https://github.com/hashicorp/terraform-provider-google/blob/main/google/services/monitoring/resource_monitoring_alert_policy.go#L963
+ SEVERITIES_MAP = {
+ "CRITICAL": AlertSeverity.CRITICAL,
+ "ERROR": AlertSeverity.HIGH,
+ "WARNING": AlertSeverity.WARNING,
+ }
+
+ STATUS_MAP = {
+ "CLOSED": AlertStatus.RESOLVED,
+ "OPEN": AlertStatus.FIRING,
+ }
+
+ PROVIDER_DISPLAY_NAME = "GCP Monitoring"
+ FINGERPRINT_FIELDS = ["incident_id"]
+
+ def __init__(
+ self, context_manager: ContextManager, provider_id: str, config: ProviderConfig
+ ):
+ super().__init__(context_manager, provider_id, config)
+
+ def validate_config(self):
+ """
+        Validates required configuration for the GCP Monitoring provider.
+ """
+ # no config
+ pass
+
+ @staticmethod
+ def _format_alert(
+ event: dict, provider_instance: Optional["GcpmonitoringProvider"]
+ ) -> AlertDto:
+ incident = event.get("incident", {})
+ description = incident.pop("summary", "")
+ status = GcpmonitoringProvider.STATUS_MAP.get(
+ incident.pop("state", "").upper(), AlertStatus.FIRING
+ )
+ url = incident.pop("url", "")
+ name = incident.pop("documentation", {}).get("subject")
+ incident_id = incident.pop("incident_id", "")
+ # Get the severity
+ if "severity" in incident:
+ severity = GcpmonitoringProvider.SEVERITIES_MAP.get(
+ incident.pop("severity").upper(), AlertSeverity.INFO
+ )
+ # In some cases (this is from the terraform provider) the severity is in the policy_user_labels
+ else:
+ severity = GcpmonitoringProvider.SEVERITIES_MAP.get(
+ incident.get("policy_user_labels", {}).get("severity"),
+ AlertSeverity.INFO,
+ )
+ # Parse and format the timestamp
+ event_time = incident.get("started_at")
+ if event_time:
+ event_time = datetime.datetime.fromtimestamp(event_time)
+ else:
+ event_time = datetime.datetime.utcnow()
+ # replace timezone to utc
+ event_time = event_time.replace(tzinfo=datetime.timezone.utc)
+
+ # Construct the alert object
+ alert = AlertDto(
+ id=incident_id,
+ name=name,
+ status=status,
+ lastReceived=str(event_time),
+ source=["gcpmonitoring"],
+ description=description,
+ severity=severity,
+ url=url,
+ **incident
+ )
+
+ # Set fingerprint if applicable
+ alert.fingerprint = BaseProvider.get_alert_fingerprint(
+ alert, GcpmonitoringProvider.FINGERPRINT_FIELDS
+ )
+ return alert
+
+
+if __name__ == "__main__":
+ pass
diff --git a/keep/providers/grafana_provider/grafana_provider.py b/keep/providers/grafana_provider/grafana_provider.py
index fc1f2c62a..30f9b2f33 100644
--- a/keep/providers/grafana_provider/grafana_provider.py
+++ b/keep/providers/grafana_provider/grafana_provider.py
@@ -4,6 +4,7 @@
import dataclasses
import datetime
+from typing import Optional
import pydantic
import requests
@@ -155,7 +156,7 @@ def get_alerts_configuration(self, alert_id: str | None = None):
headers = {"Authorization": f"Bearer {self.authentication_config.token}"}
response = requests.get(api, verify=False, headers=headers)
if not response.ok:
- self.logger.warn(
+ self.logger.warning(
"Could not get alerts", extra={"response": response.json()}
)
error = response.json()
@@ -174,7 +175,7 @@ def deploy_alert(self, alert: dict, alert_id: str | None = None):
if not response.ok:
response_json = response.json()
- self.logger.warn(
+ self.logger.warning(
"Could not deploy alert", extra={"response": response_json}
)
raise Exception(response_json)
@@ -192,7 +193,9 @@ def get_alert_schema():
return GrafanaAlertFormatDescription.schema()
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["GrafanaProvider"]
+ ) -> AlertDto:
alerts = event.get("alerts", [])
formatted_alerts = []
for alert in alerts:
@@ -487,8 +490,8 @@ def _get_alerts(self) -> list[AlertDto]:
return alerts
return []
- @staticmethod
- def simulate_alert(**kwargs) -> dict:
+ @classmethod
+ def simulate_alert(cls, **kwargs) -> dict:
import hashlib
import json
import random
diff --git a/keep/providers/keep_provider/keep_provider.py b/keep/providers/keep_provider/keep_provider.py
index 29f960c37..e13fbf6cd 100644
--- a/keep/providers/keep_provider/keep_provider.py
+++ b/keep/providers/keep_provider/keep_provider.py
@@ -1,7 +1,9 @@
"""
Keep Provider is a class that allows to ingest/digest data from Keep.
"""
+
import logging
+from typing import Optional
from keep.api.core.db import get_alerts_with_filters
from keep.api.models.alert import AlertDto
@@ -26,7 +28,7 @@ def dispose(self):
"""
pass
- def _query(self, filters, **kwargs):
+ def _query(self, filters, distinct=True, **kwargs):
"""
Query Keep for alerts.
"""
@@ -34,13 +36,17 @@ def _query(self, filters, **kwargs):
self.context_manager.tenant_id, filters=filters
)
+ fingerprints = {}
alerts = []
if db_alerts:
for alert in db_alerts:
+ if fingerprints.get(alert.fingerprint) and distinct is True:
+ continue
alert_event = alert.event
if alert.alert_enrichment:
alert_event["enrichments"] = alert.alert_enrichment.enrichments
alerts.append(alert_event)
+ fingerprints[alert.fingerprint] = True
return alerts
def validate_config(self):
@@ -51,7 +57,9 @@ def validate_config(self):
pass
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["KeepProvider"]
+ ) -> AlertDto:
return AlertDto(
**event,
)
diff --git a/keep/providers/kibana_provider/kibana_provider.py b/keep/providers/kibana_provider/kibana_provider.py
index 5716777f9..210ef14da 100644
--- a/keep/providers/kibana_provider/kibana_provider.py
+++ b/keep/providers/kibana_provider/kibana_provider.py
@@ -5,7 +5,7 @@
import datetime
import json
import uuid
-from typing import Literal
+from typing import Literal, Optional
from urllib.parse import urlparse
import pydantic
@@ -466,7 +466,9 @@ def format_alert_from_watcher(event: dict) -> AlertDto | list[AlertDto]:
)
@staticmethod
- def _format_alert(event: dict) -> AlertDto | list[AlertDto]:
+ def _format_alert(
+ event: dict, provider_instance: Optional["KibanaProvider"]
+ ) -> AlertDto | list[AlertDto]:
"""
Formats an alert from Kibana to a standard format.
diff --git a/keep/providers/linearb_provider/linearb_provider.py b/keep/providers/linearb_provider/linearb_provider.py
index f02109777..f2b4804d1 100644
--- a/keep/providers/linearb_provider/linearb_provider.py
+++ b/keep/providers/linearb_provider/linearb_provider.py
@@ -117,12 +117,14 @@ def _notify(
team_names.append(team)
payload["teams"] = team_names
- if respository_urls and isinstance(respository_urls, str):
- respository_urls = json.loads(respository_urls)
+ if respository_urls:
+ if isinstance(respository_urls, str):
+ respository_urls = json.loads(respository_urls)
payload["respository_urls"] = respository_urls
- if services and isinstance(services, str):
- services = json.loads(services)
+ if services:
+ if isinstance(services, str):
+ services = json.loads(services)
payload["services"] = services
if started_at:
@@ -160,6 +162,16 @@ def _notify(
"teams": teams,
}
+ if respository_urls:
+ if isinstance(respository_urls, str):
+ respository_urls = json.loads(respository_urls)
+ payload["respository_urls"] = respository_urls
+
+ if services:
+ if isinstance(services, str):
+ services = json.loads(services)
+ payload["services"] = services
+
result = requests.post(
f"{self.LINEARB_API}/api/v1/incidents",
json=payload,
diff --git a/keep/providers/mock_provider/mock_provider.py b/keep/providers/mock_provider/mock_provider.py
index 12d207d28..a8af0f945 100644
--- a/keep/providers/mock_provider/mock_provider.py
+++ b/keep/providers/mock_provider/mock_provider.py
@@ -23,6 +23,14 @@ def _query(self, **kwargs):
"""
return kwargs.get("command_output")
+ def _notify(self, **kwargs):
+ """This is mock provider that just return the command output.
+
+ Returns:
+ _type_: _description_
+ """
+ return kwargs
+
def dispose(self):
"""
No need to dispose of anything, so just do nothing.
diff --git a/keep/providers/newrelic_provider/newrelic_provider.py b/keep/providers/newrelic_provider/newrelic_provider.py
index e8bd0ffb3..a609b70c6 100644
--- a/keep/providers/newrelic_provider/newrelic_provider.py
+++ b/keep/providers/newrelic_provider/newrelic_provider.py
@@ -5,6 +5,7 @@
import dataclasses
import json
from datetime import datetime
+from typing import Optional
import pydantic
import requests
@@ -46,6 +47,8 @@ class NewrelicProviderAuthConfig:
class NewrelicProvider(BaseProvider):
+ """Get alerts from New Relic into Keep."""
+
NEWRELIC_WEBHOOK_NAME = "keep-webhook"
PROVIDER_DISPLAY_NAME = "New Relic"
PROVIDER_SCOPES = [
@@ -418,12 +421,14 @@ def get_alerts(self) -> list[AlertDto]:
return formatted_alerts
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["NewrelicProvider"]
+ ) -> AlertDto:
"""We are already registering template same as generic AlertDTO"""
lastReceived = event["lastReceived"] if "lastReceived" in event else None
if lastReceived:
lastReceived = datetime.utcfromtimestamp(lastReceived / 1000).strftime(
- "%Y-%m-%d %H:%M:%S"
+ "%Y-%m-%d %H:%M:%SZ",
)
event["lastReceived"] = lastReceived
# format status and severity to Keep format
diff --git a/keep/providers/pagerduty_provider/pagerduty_provider.py b/keep/providers/pagerduty_provider/pagerduty_provider.py
index 3c446d6ab..1f0d2bb42 100644
--- a/keep/providers/pagerduty_provider/pagerduty_provider.py
+++ b/keep/providers/pagerduty_provider/pagerduty_provider.py
@@ -240,12 +240,14 @@ def _get_alerts(self) -> list[AlertDto]:
raise Exception("Could not get alerts")
incidents = request.json().get("incidents", [])
incidents = [
- self.format_alert({"event": {"data": incident}}) for incident in incidents
+ self._format_alert({"event": {"data": incident}}) for incident in incidents
]
return incidents
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: typing.Optional["PagerdutyProvider"]
+ ) -> AlertDto:
actual_event = event.get("event", {})
data = actual_event.get("data", {})
url = data.pop("self", data.pop("html_url"))
diff --git a/keep/providers/parseable_provider/parseable_provider.py b/keep/providers/parseable_provider/parseable_provider.py
index 3843032fc..10b20e19d 100644
--- a/keep/providers/parseable_provider/parseable_provider.py
+++ b/keep/providers/parseable_provider/parseable_provider.py
@@ -6,6 +6,7 @@
import json
import logging
import os
+from typing import Optional
from uuid import uuid4
import pydantic
@@ -118,7 +119,9 @@ def validate_config(self):
)
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["ParseableProvider"]
+ ) -> AlertDto:
environment = "unknown"
id = event.pop("id", str(uuid4()))
name = event.pop("alert", "")
diff --git a/keep/providers/pingdom_provider/pingdom_provider.py b/keep/providers/pingdom_provider/pingdom_provider.py
index 0c025c8e6..24d5e5896 100644
--- a/keep/providers/pingdom_provider/pingdom_provider.py
+++ b/keep/providers/pingdom_provider/pingdom_provider.py
@@ -1,5 +1,6 @@
import dataclasses
import datetime
+from typing import Optional
import pydantic
import requests
@@ -143,7 +144,9 @@ def _get_alerts(self) -> list[AlertDto]:
return alerts_dtos
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["PingdomProvider"]
+ ) -> AlertDto:
# https://pingdom.com/resources/webhooks/#Examples-of-webhook-JSON-output-for-uptime-checks
# map severity and status to keep's format
diff --git a/keep/providers/prometheus_provider/prometheus_provider.py b/keep/providers/prometheus_provider/prometheus_provider.py
index 2a45f8ede..19ae414ad 100644
--- a/keep/providers/prometheus_provider/prometheus_provider.py
+++ b/keep/providers/prometheus_provider/prometheus_provider.py
@@ -5,6 +5,7 @@
import dataclasses
import datetime
import os
+from typing import Optional
import pydantic
import requests
@@ -132,7 +133,7 @@ def _get_alerts(self) -> list[AlertDto]:
if not response.ok:
return []
alerts_data = response.json().get("data", {})
- alert_dtos = self.format_alert(alerts_data)
+ alert_dtos = self._format_alert(alerts_data)
return alert_dtos
def get_status(event: dict) -> AlertStatus:
@@ -141,7 +142,9 @@ def get_status(event: dict) -> AlertStatus:
)
@staticmethod
- def _format_alert(event: dict) -> list[AlertDto]:
+ def _format_alert(
+ event: dict, provider_instance: Optional["PrometheusProvider"]
+ ) -> list[AlertDto]:
# TODO: need to support more than 1 alert per event
alert_dtos = []
alerts = event.get("alerts", [event])
@@ -197,8 +200,8 @@ def notify(self, **kwargs):
"""
raise NotImplementedError("Prometheus provider does not support notify()")
- @staticmethod
- def simulate_alert(**kwargs) -> dict:
+ @classmethod
+ def simulate_alert(cls, **kwargs) -> dict:
"""Mock a Prometheus alert."""
import hashlib
import json
diff --git a/keep/providers/sentry_provider/alerts_mock.py b/keep/providers/sentry_provider/alerts_mock.py
new file mode 100644
index 000000000..b50d13cf5
--- /dev/null
+++ b/keep/providers/sentry_provider/alerts_mock.py
@@ -0,0 +1,398 @@
+ALERTS = {
+ "simulate": {
+ "payload": {
+ "id": "4616132096",
+ "project": "python",
+ "project_name": "python",
+ "project_slug": "python",
+ "logger": None,
+ "level": "error",
+ "culprit": "raven.scripts.runner in main",
+ "message": "This is an example Python exception",
+ "url": "https://keep-dr.sentry.io/issues/4616132096/?referrer=webhooks_plugin",
+ "triggering_rules": [],
+ "event": {
+ "event_id": "d432ae7c01c640b597831fb1710e3413",
+ "level": "error",
+ "version": "5",
+ "type": "default",
+ "logentry": {
+ "formatted": "This is an example Python exception",
+ "message": None,
+ "params": None,
+ },
+ "logger": "",
+ "modules": {"my.package": "1.0.0"},
+ "platform": "python",
+ "timestamp": 1709991185.873,
+ "received": 1709991245.874362,
+ "environment": "prod",
+ "user": {
+ "id": "1",
+ "email": "sentry@example.com",
+ "ip_address": "127.0.0.1",
+ "username": "sentry",
+ "name": "Sentry",
+ "geo": {"country_code": "AU", "city": "Melbourne", "region": "VIC"},
+ "sentry_user": "id:1",
+ },
+ "request": {
+ "url": "http://example.com/foo",
+ "method": "GET",
+ "data": {"hello": "world"},
+ "query_string": [["foo", "bar"]],
+ "cookies": [["foo", "bar"], ["biz", "baz"]],
+ "headers": [
+ ["Content-Type", "application/json"],
+ ["Referer", "http://example.com"],
+ [
+ "User-Agent",
+ "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36",
+ ],
+ ],
+ "env": {"ENV": "prod"},
+ "inferred_content_type": "application/json",
+ "api_target": None,
+ "fragment": None,
+ },
+ "contexts": {
+ "browser": {
+ "name": "Chrome",
+ "version": "28.0.1500",
+ "type": "browser",
+ },
+ "client_os": {"name": "Windows", "version": "8", "type": "os"},
+ },
+ "stacktrace": {
+ "frames": [
+ {
+ "function": "build_msg",
+ "module": "raven.base",
+ "filename": "raven/base.py",
+ "abs_path": "/home/ubuntu/.virtualenvs/getsentry/src/raven/raven/base.py",
+ "lineno": 303,
+ "pre_context": [
+ " frames = stack",
+ "",
+ " data.update({",
+ " 'sentry.interfaces.Stacktrace': {",
+ " 'frames': get_stack_info(frames,",
+ ],
+ "context_line": " transformer=self.transform)",
+ "post_context": [
+ " },",
+ " })",
+ "",
+ " if 'sentry.interfaces.Stacktrace' in data:",
+ " if self.include_paths:",
+ ],
+ "in_app": False,
+ "vars": {
+ "'culprit'": None,
+ "'data'": {
+ "'message'": "u'This is a test message generated using ``raven test``'",
+ "'sentry.interfaces.Message'": {
+ "'message'": "u'This is a test message generated using ``raven test``'",
+ "'params'": [],
+ },
+ },
+ "'date'": "datetime.datetime(2013, 8, 13, 3, 8, 24, 880386)",
+ "'event_id'": "'54a322436e1b47b88e239b78998ae742'",
+ "'event_type'": "'raven.events.Message'",
+ "'extra'": {
+ "'go_deeper'": [
+ ["{\"'bar'\":[\"'baz'\"],\"'foo'\":\"'bar'\"}"]
+ ],
+ "'loadavg'": [
+ 0.37255859375,
+ 0.5341796875,
+ 0.62939453125,
+ ],
+ "'user'": "'dcramer'",
+ },
+ "'frames'": "
",
+ "'handler'": "",
+ "'k'": "'sentry.interfaces.Message'",
+ "'kwargs'": {
+ "'level'": 20,
+ "'message'": "'This is a test message generated using ``raven test``'",
+ },
+ "'public_key'": None,
+ "'result'": {
+ "'message'": "u'This is a test message generated using ``raven test``'",
+ "'sentry.interfaces.Message'": {
+ "'message'": "u'This is a test message generated using ``raven test``'",
+ "'params'": [],
+ },
+ },
+ "'self'": "",
+ "'stack'": True,
+ "'tags'": None,
+ "'time_spent'": None,
+ "'v'": {
+ "'message'": "u'This is a test message generated using ``raven test``'",
+ "'params'": [],
+ },
+ },
+ "colno": None,
+ "data": None,
+ "errors": None,
+ "raw_function": None,
+ "image_addr": None,
+ "instruction_addr": None,
+ "addr_mode": None,
+ "package": None,
+ "platform": None,
+ "source_link": None,
+ "symbol": None,
+ "symbol_addr": None,
+ "trust": None,
+ "snapshot": None,
+ "lock": None,
+ },
+ {
+ "function": "capture",
+ "module": "raven.base",
+ "filename": "raven/base.py",
+ "abs_path": "/home/ubuntu/.virtualenvs/getsentry/src/raven/raven/base.py",
+ "lineno": 459,
+ "pre_context": [
+ " if not self.is_enabled():",
+ " return",
+ "",
+ " data = self.build_msg(",
+ " event_type, data, date, time_spent, extra, stack, tags=tags,",
+ ],
+ "context_line": " **kwargs)",
+ "post_context": [
+ "",
+ " self.send(**data)",
+ "",
+ " return (data.get('event_id'),)",
+ "",
+ ],
+ "in_app": False,
+ "vars": {
+ "'data'": None,
+ "'date'": None,
+ "'event_type'": "'raven.events.Message'",
+ "'extra'": {
+ "'go_deeper'": [
+ ["{\"'bar'\":[\"'baz'\"],\"'foo'\":\"'bar'\"}"]
+ ],
+ "'loadavg'": [
+ 0.37255859375,
+ 0.5341796875,
+ 0.62939453125,
+ ],
+ "'user'": "'dcramer'",
+ },
+ "'kwargs'": {
+ "'level'": 20,
+ "'message'": "'This is a test message generated using ``raven test``'",
+ },
+ "'self'": "",
+ "'stack'": True,
+ "'tags'": None,
+ "'time_spent'": None,
+ },
+ "colno": None,
+ "data": None,
+ "errors": None,
+ "raw_function": None,
+ "image_addr": None,
+ "instruction_addr": None,
+ "addr_mode": None,
+ "package": None,
+ "platform": None,
+ "source_link": None,
+ "symbol": None,
+ "symbol_addr": None,
+ "trust": None,
+ "snapshot": None,
+ "lock": None,
+ },
+ {
+ "function": "captureMessage",
+ "module": "raven.base",
+ "filename": "raven/base.py",
+ "abs_path": "/home/ubuntu/.virtualenvs/getsentry/src/raven/raven/base.py",
+ "lineno": 577,
+ "pre_context": [
+ ' """',
+ " Creates an event from ``message``.",
+ "",
+ " >>> client.captureMessage('My event just happened!')",
+ ' """',
+ ],
+ "context_line": " return self.capture('raven.events.Message', message=message, **kwargs)",
+ "post_context": [
+ "",
+ " def captureException(self, exc_info=None, **kwargs):",
+ ' """',
+ " Creates an event from an exception.",
+ "",
+ ],
+ "in_app": False,
+ "vars": {
+ "'kwargs'": {
+ "'data'": None,
+ "'extra'": {
+ "'go_deeper'": [
+ "[{\"'bar'\":[\"'baz'\"],\"'foo'\":\"'bar'\"}]"
+ ],
+ "'loadavg'": [
+ 0.37255859375,
+ 0.5341796875,
+ 0.62939453125,
+ ],
+ "'user'": "'dcramer'",
+ },
+ "'level'": 20,
+ "'stack'": True,
+ "'tags'": None,
+ },
+ "'message'": "'This is a test message generated using ``raven test``'",
+ "'self'": "",
+ },
+ "colno": None,
+ "data": None,
+ "errors": None,
+ "raw_function": None,
+ "image_addr": None,
+ "instruction_addr": None,
+ "addr_mode": None,
+ "package": None,
+ "platform": None,
+ "source_link": None,
+ "symbol": None,
+ "symbol_addr": None,
+ "trust": None,
+ "snapshot": None,
+ "lock": None,
+ },
+ {
+ "function": "send_test_message",
+ "module": "raven.scripts.runner",
+ "filename": "raven/scripts/runner.py",
+ "abs_path": "/home/ubuntu/.virtualenvs/getsentry/src/raven/raven/scripts/runner.py",
+ "lineno": 77,
+ "pre_context": [
+ " level=logging.INFO,",
+ " stack=True,",
+ " tags=options.get('tags', {}),",
+ " extra={",
+ " 'user': get_uid(),",
+ ],
+ "context_line": " 'loadavg': get_loadavg(),",
+ "post_context": [
+ " },",
+ " ))",
+ "",
+ " if client.state.did_fail():",
+ " print('error!')",
+ ],
+ "in_app": False,
+ "vars": {
+ "'client'": "",
+ "'data'": None,
+ "'k'": "'secret_key'",
+ "'options'": {"'data'": None, "'tags'": None},
+ },
+ "colno": None,
+ "data": None,
+ "errors": None,
+ "raw_function": None,
+ "image_addr": None,
+ "instruction_addr": None,
+ "addr_mode": None,
+ "package": None,
+ "platform": None,
+ "source_link": None,
+ "symbol": None,
+ "symbol_addr": None,
+ "trust": None,
+ "snapshot": None,
+ "lock": None,
+ },
+ {
+ "function": "main",
+ "module": "raven.scripts.runner",
+ "filename": "raven/scripts/runner.py",
+ "abs_path": "/home/ubuntu/.virtualenvs/getsentry/src/raven/raven/scripts/runner.py",
+ "lineno": 112,
+ "pre_context": [
+ ' print("Using DSN configuration:")',
+ ' print(" ", dsn)',
+ " print()",
+ "",
+ " client = Client(dsn, include_paths=['raven'])",
+ ],
+ "context_line": " send_test_message(client, opts.__dict__)",
+ "in_app": False,
+ "vars": {
+ "'args'": [
+ "'test'",
+ ],
+ "'client'": "",
+ "'opts'": "",
+ "'parser'": "",
+ "'root'": "",
+ },
+ "colno": None,
+ "data": None,
+ "errors": None,
+ "raw_function": None,
+ "image_addr": None,
+ "instruction_addr": None,
+ "addr_mode": None,
+ "package": None,
+ "platform": None,
+ "post_context": None,
+ "source_link": None,
+ "symbol": None,
+ "symbol_addr": None,
+ "trust": None,
+ "snapshot": None,
+ "lock": None,
+ },
+ ]
+ },
+ "tags": [
+ ["browser", "Chrome 28.0.1500"],
+ ["browser.name", "Chrome"],
+ ["client_os", "Windows 8"],
+ ["client_os.name", "Windows"],
+ ["environment", "prod"],
+ ["level", "error"],
+ ["sentry:user", "id:1"],
+ ["server_name", "web01.example.org"],
+ ["url", "http://example.com/foo"],
+ ],
+ "extra": {
+ "emptyList": [],
+ "emptyMap": {},
+ "length": 10837790,
+ "results": [1, 2, 3, 4, 5],
+ "session": {"foo": "bar"},
+ "unauthorized": False,
+ "url": "http://example.org/foo/bar/",
+ },
+ "metadata": {
+ "title": "This is an example Python exception",
+ "in_app_frame_mix": "system-only",
+ },
+ "fingerprint": ["{{ default }}"],
+ "hashes": ["3a2b45089d0211943e5a6645fb4cea3f"],
+ "culprit": "raven.scripts.runner in main",
+ "title": "This is an example Python exception",
+ "location": None,
+ "_ref": 4506019251093504,
+ "_ref_version": 2,
+ "_metrics": {"bytes.stored.event": 8274},
+ "nodestore_insert": 1709991246.811978,
+ "id": "d432ae7c01c640b597831fb1710e3413",
+ },
+ }
+ }
+}
diff --git a/keep/providers/sentry_provider/sentry_provider.py b/keep/providers/sentry_provider/sentry_provider.py
index 14df04ecb..72db6a20c 100644
--- a/keep/providers/sentry_provider/sentry_provider.py
+++ b/keep/providers/sentry_provider/sentry_provider.py
@@ -5,6 +5,7 @@
import dataclasses
import datetime
import logging
+from typing import Optional
import pydantic
import requests
@@ -21,6 +22,14 @@
class SentryProviderAuthConfig:
"""Sentry authentication configuration."""
+ api_url: str = dataclasses.field(
+ metadata={
+ "required": False,
+ "description": "Sentry API URL",
+ "hint": "https://sentry.io/api/0 (see https://docs.sentry.io/api/)",
+ "sensitive": False,
+ }
+ )
api_key: str = dataclasses.field(
metadata={
"required": True,
@@ -45,7 +54,7 @@ class SentryProviderAuthConfig:
class SentryProvider(BaseProvider):
"""Enrich alerts with data from Sentry."""
- SENTRY_API = "https://sentry.io/api/0"
+ SENTRY_DEFAULT_API = "https://sentry.io/api/0"
PROVIDER_SCOPES = [
ProviderScope(
"event:read",
@@ -88,13 +97,16 @@ def __init__(
super().__init__(context_manager, provider_id, config)
self.sentry_org_slug = self.config.authentication.get("organization_slug")
self.project_slug = self.config.authentication.get("project_slug")
+ self.sentry_api = self.config.authentication.get(
+ "api_url", self.SENTRY_DEFAULT_API
+ )
@property
def __headers(self) -> dict:
return {"Authorization": f"Bearer {self.authentication_config.api_key}"}
def get_events_url(self, project, date="14d"):
- return f"{self.SENTRY_API}/organizations/{self.sentry_org_slug}/events/?field=title&field=event.type&field=project&field=user.display&field=timestamp&field=replayId&per_page=50 \
+ return f"{self.sentry_api}/organizations/{self.sentry_org_slug}/events/?field=title&field=event.type&field=project&field=user.display&field=timestamp&field=replayId&per_page=50 \
&query={project}&referrer=api.discover.query-table&sort=-timestamp&statsPeriod={date}"
def dispose(self):
@@ -139,7 +151,7 @@ def validate_scopes(self) -> dict[str, bool | str]:
if scope.name == "event:read":
if self.project_slug:
response = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{self.project_slug}/issues/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{self.project_slug}/issues/",
headers=self.__headers,
)
if not response.ok:
@@ -148,7 +160,7 @@ def validate_scopes(self) -> dict[str, bool | str]:
continue
else:
projects_response = requests.get(
- f"{self.SENTRY_API}/projects/",
+ f"{self.sentry_api}/projects/",
headers=self.__headers,
)
if not projects_response.ok:
@@ -158,7 +170,7 @@ def validate_scopes(self) -> dict[str, bool | str]:
projects = projects_response.json()
project_slug = projects[0].get("slug")
response = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/issues/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/issues/",
headers=self.__headers,
)
if not response.ok:
@@ -168,7 +180,7 @@ def validate_scopes(self) -> dict[str, bool | str]:
validated_scopes[scope.name] = True
elif scope.name == "project:read":
response = requests.get(
- f"{self.SENTRY_API}/projects/",
+ f"{self.sentry_api}/projects/",
headers=self.__headers,
)
if not response.ok:
@@ -178,7 +190,7 @@ def validate_scopes(self) -> dict[str, bool | str]:
validated_scopes[scope.name] = True
elif scope.name == "project:write":
response = requests.post(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{self.project_slug or project_slug}/plugins/webhooks/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{self.project_slug or project_slug}/plugins/webhooks/",
headers=self.__headers,
)
if not response.ok:
@@ -189,7 +201,9 @@ def validate_scopes(self) -> dict[str, bool | str]:
return validated_scopes
@staticmethod
- def _format_alert(event: dict) -> AlertDto | list[AlertDto]:
+ def _format_alert(
+ event: dict, provider_instance: Optional["SentryProvider"]
+ ) -> AlertDto | list[AlertDto]:
logger = logging.getLogger(__name__)
logger.info(
"Formatting Sentry alert",
@@ -271,7 +285,7 @@ def setup_webhook(
else:
# Get all projects if no project slug was given
projects_response = requests.get(
- f"{self.SENTRY_API}/projects/",
+ f"{self.sentry_api}/projects/",
headers=self.__headers,
)
if not projects_response.ok:
@@ -283,7 +297,7 @@ def setup_webhook(
for project_slug in project_slugs:
self.logger.info(f"Setting up webhook for project {project_slug}")
webhooks_request = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/plugins/webhooks/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/plugins/webhooks/",
headers=self.__headers,
)
webhooks_request.raise_for_status()
@@ -315,20 +329,20 @@ def setup_webhook(
existing_webhooks.append(f"{keep_api_url}&api_key={api_key}")
# Update the webhooks urls
update_response = requests.put(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/plugins/webhooks/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/plugins/webhooks/",
headers=self.__headers,
json={"urls": "\n".join(existing_webhooks)},
)
update_response.raise_for_status()
# Enable webhooks plugin for project
requests.post(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/plugins/webhooks/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/plugins/webhooks/",
headers=self.__headers,
).raise_for_status()
# TODO: make sure keep alert does not exist and if it doesnt create it.
alert_rule_name = f"Keep Alert Rule - {project_slug}"
alert_rules_response = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/rules/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/rules/",
headers=self.__headers,
).json()
alert_rule_exists = next(
@@ -365,7 +379,7 @@ def setup_webhook(
}
try:
requests.post(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/rules/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/rules/",
headers=self.__headers,
json=alert_payload,
).raise_for_status()
@@ -396,7 +410,7 @@ def __get_issues(self, project_slug: str) -> dict:
dict: issues by id
"""
issues_response = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/issues/?query=*",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/issues/?query=*",
headers=self.__headers,
)
if not issues_response.ok:
@@ -408,7 +422,7 @@ def _get_alerts(self) -> list[AlertDto]:
all_issues_by_project = {}
if self.authentication_config.project_slug:
response = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{self.project_slug}/events/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{self.project_slug}/events/",
headers=self.__headers,
timeout=SentryProvider.DEFAULT_TIMEOUT,
)
@@ -420,7 +434,7 @@ def _get_alerts(self) -> list[AlertDto]:
)
else:
projects_response = requests.get(
- f"{self.SENTRY_API}/projects/",
+ f"{self.sentry_api}/projects/",
headers=self.__headers,
timeout=SentryProvider.DEFAULT_TIMEOUT,
)
@@ -430,7 +444,7 @@ def _get_alerts(self) -> list[AlertDto]:
for project in projects:
project_slug = project.get("slug")
response = requests.get(
- f"{self.SENTRY_API}/projects/{self.sentry_org_slug}/{project_slug}/events/",
+ f"{self.sentry_api}/projects/{self.sentry_org_slug}/{project_slug}/events/",
headers=self.__headers,
timeout=SentryProvider.DEFAULT_TIMEOUT,
)
@@ -499,25 +513,30 @@ def _get_alerts(self) -> list[AlertDto]:
tenant_id="singletenant",
workflow_id="test",
)
+
# Load environment variables
import os
+ sentry_api_url = os.environ.get("SENTRY_API_URL")
sentry_api_token = os.environ.get("SENTRY_API_TOKEN")
sentry_org_slug = os.environ.get("SENTRY_ORG_SLUG")
sentry_project_slug = os.environ.get("SENTRY_PROJECT_SLUG")
config = {
"authentication": {
+ "api_url": sentry_api_url,
"api_key": sentry_api_token,
"organization_slug": sentry_org_slug,
"project_slug": sentry_project_slug,
},
}
+
provider = ProvidersFactory.get_provider(
context_manager,
provider_id="sentry-prod",
provider_type="sentry",
provider_config=config,
)
+
alerts = provider.get_alerts()
print(alerts)
diff --git a/keep/providers/signalfx_provider/alerts_mock.py b/keep/providers/signalfx_provider/alerts_mock.py
index e49d503cc..39f268051 100644
--- a/keep/providers/signalfx_provider/alerts_mock.py
+++ b/keep/providers/signalfx_provider/alerts_mock.py
@@ -1,17 +1,27 @@
ALERTS = {
- "severity": "Critical",
- "originatingMetric": "sf.org.log.numMessagesDroppedThrottle",
- "detectOnCondition": "when(A < threshold(1))",
- "messageBody": 'Rule "logs" in detector "logs" cleared at Thu, 29 Feb 2024 11:48:32 GMT.\n\nCurrent signal value for sf.org.log.numMessagesDroppedThrottle: 0\n\nSignal details:\n{sf_metric=sf.org.log.numMessagesDroppedThrottle, orgId=XXXX}',
- "inputs": {
- "A": {"value": "0", "fragment": "data(...A')", "key": {...}},
- "_S2": {"value": "1", "fragment": "threshold(1)"},
- },
- "rule": "logs",
- "description": "The value of sf.org.log.numMessagesDroppedThrottle is below 1.",
- "messageTitle": "Manually resolved: logs (logs)",
- "sf_schema": 2,
- "eventType": "XXXX_XXXX_logs",
- "runbookUrl": None,
- "triggeredWhileMuted": False,
+ "simulate": {
+ "payload": {
+ "severity": "Critical",
+ "statusExtended": "anomalous",
+ "detectorUrl": "https://app.signalfx.com/#/detector/XXXX",
+ "incidentId": "1234",
+ "originatingMetric": "sf.org.log.numMessagesDroppedThrottle",
+ "detectOnCondition": "when(A < threshold(1))",
+ "messageBody": 'Rule "logs" in detector "logs" cleared at Thu, 29 Feb 2024 11:48:32 GMT.\n\nCurrent signal value for sf.org.log.numMessagesDroppedThrottle: 0\n\nSignal details:\n{sf_metric=sf.org.log.numMessagesDroppedThrottle, orgId=XXXX}',
+ "inputs": {
+ "A": {
+ "value": "0",
+ "fragment": "data(...A')",
+ "_S2": {"value": "1", "fragment": "threshold(1)"},
+ },
+ "rule": "logs",
+ "description": "The value of sf.org.log.numMessagesDroppedThrottle is below 1.",
+ "messageTitle": "Manually resolved: logs (logs)",
+ "sf_schema": 2,
+ "eventType": "XXXX_XXXX_logs",
+ "runbookUrl": None,
+ "triggeredWhileMuted": False,
+ },
+ }
+ }
}
diff --git a/keep/providers/signalfx_provider/signalfx_provider.py b/keep/providers/signalfx_provider/signalfx_provider.py
index 53b91cfa3..8fc75005c 100644
--- a/keep/providers/signalfx_provider/signalfx_provider.py
+++ b/keep/providers/signalfx_provider/signalfx_provider.py
@@ -1,6 +1,7 @@
import base64
import dataclasses
import datetime
+from typing import Optional
from urllib.parse import quote, urlparse
import pydantic
@@ -72,6 +73,8 @@ class SignalfxProviderAuthConfig:
class SignalfxProvider(BaseProvider):
+ """Get alerts from SignalFx into Keep."""
+
PROVIDER_SCOPES = [
ProviderScope(
name="API",
@@ -208,7 +211,9 @@ def _format_alert_get_alert(self, incident: dict) -> AlertDto:
return alert_dto
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["SignalfxProvider"]
+ ) -> AlertDto:
# Transform a SignalFx event into an AlertDto object
# see: https://docs.splunk.com/observability/en/admin/notif-services/webhook.html#observability-cloud-webhook-request-body-fields
severity = SignalfxProvider.SEVERITIES_MAP.get(
diff --git a/keep/providers/splunk_provider/__init__.py b/keep/providers/splunk_provider/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/keep/providers/splunk_provider/splunk_provider.py b/keep/providers/splunk_provider/splunk_provider.py
new file mode 100644
index 000000000..85024c157
--- /dev/null
+++ b/keep/providers/splunk_provider/splunk_provider.py
@@ -0,0 +1,214 @@
+import dataclasses
+import datetime
+from typing import Optional
+
+import pydantic
+from splunklib.client import connect
+
+from keep.api.models.alert import AlertDto, AlertSeverity
+from keep.contextmanager.contextmanager import ContextManager
+from keep.providers.base.base_provider import BaseProvider
+from keep.providers.models.provider_config import ProviderConfig, ProviderScope
+from keep.providers.providers_factory import ProvidersFactory
+
+
+@pydantic.dataclasses.dataclass
+class SplunkProviderAuthConfig:
+ api_key: str = dataclasses.field(
+ metadata={
+ "required": True,
+ "description": "Splunk API Key",
+ "sensitive": True,
+ }
+ )
+
+ host: str = dataclasses.field(
+ metadata={
+ "description": "Splunk Host (default is localhost)",
+ },
+ default="localhost",
+ )
+ port: int = dataclasses.field(
+ metadata={
+ "description": "Splunk Port (default is 8089)",
+ },
+ default=8089,
+ )
+
+
+class SplunkProvider(BaseProvider):
+ """Pull alerts and query incidents from Splunk."""
+
+ PROVIDER_SCOPES = [
+ ProviderScope(
+ name="authenticated",
+ description="The user can connect to the client",
+ mandatory=True,
+ alias="Connect to the client",
+ ),
+ ProviderScope(
+ name="list_all_objects",
+ description="The user can get all the alerts",
+ mandatory=True,
+ alias="List all Alerts",
+ ),
+ ProviderScope(
+ name="edit_own_objects",
+ description="The user can edit and add webhook to saved_searches",
+ mandatory=True,
+ alias="Needed to connect to webhook",
+ ),
+ ]
+
+ SEVERITIES_MAP = {
+ "1": AlertSeverity.LOW,
+ "2": AlertSeverity.INFO,
+ "3": AlertSeverity.WARNING,
+ "4": AlertSeverity.HIGH,
+ "5": AlertSeverity.CRITICAL,
+ }
+
+ def __init__(
+ self, context_manager: ContextManager, provider_id: str, config: ProviderConfig
+ ):
+ super().__init__(context_manager, provider_id, config)
+
+ def validate_scopes(self) -> dict[str, bool | str]:
+ list_all_objects_scope = "NOT_FOUND"
+ edit_own_object_scope = "NOT_FOUND"
+ try:
+ service = connect(
+ token=self.authentication_config.api_key,
+ host=self.authentication_config.host,
+ port=self.authentication_config.port,
+ )
+ for user in service.users:
+ user_roles = user.content["roles"]
+ for role_name in user_roles:
+ perms = self.__get_role_capabilities(
+ role_name=role_name, service=service
+ )
+ if not list_all_objects_scope and "list_all_objects" in perms:
+ list_all_objects_scope = True
+ if not edit_own_object_scope and "edit_own_objects" in perms:
+ edit_own_object_scope = True
+ if list_all_objects_scope and edit_own_object_scope:
+ break
+
+ scopes = {
+ "authenticated": True,
+ "list_all_objects": list_all_objects_scope,
+ "edit_own_objects": edit_own_object_scope,
+ }
+ except Exception as e:
+ self.logger.exception("Error validating scopes")
+ scopes = {
+ "connect_to_client": str(e),
+ "list_all_objects": "UNAUTHENTICATED",
+ "edit_own_objects": "UNAUTHENTICATED",
+ }
+ return scopes
+
+ def validate_config(self):
+ self.authentication_config = SplunkProviderAuthConfig(
+ **self.config.authentication
+ )
+
+ def __get_role_capabilities(self, role_name, service):
+ role = service.roles[role_name]
+ return role.content["capabilities"] + role.content["imported_capabilities"]
+
+ def dispose(self):
+ """
+ No need to dispose of anything, so just do nothing.
+ """
+ pass
+
+ def setup_webhook(
+ self, tenant_id: str, keep_api_url: str, api_key: str, setup_alerts: bool = True
+ ):
+ self.logger.info("Setting up Splunk webhook on all Alerts")
+ creation_updation_kwargs = {
+ "actions": "webhook",
+ "action.webhook": "1",
+ "action.webhook.param.url": keep_api_url,
+ }
+ service = connect(
+ token=self.authentication_config.api_key,
+ host=self.authentication_config.host,
+ port=self.authentication_config.port,
+ )
+ for saved_search in service.saved_searches:
+ existing_webhook_url = saved_search["_state"]["content"].get(
+ "action.webhook.param.url", None
+ )
+ if existing_webhook_url is None or existing_webhook_url != keep_api_url:
+ saved_search.update(**creation_updation_kwargs).refresh()
+
+ @staticmethod
+ def _format_alert(
+ event: dict, provider_instance: Optional["SplunkProvider"]
+ ) -> AlertDto:
+ if not provider_instance:
+ return AlertDto(
+ id=event["sid"],
+ name=event["search_name"],
+ source=["splunk"],
+ url=event["results_link"],
+ lastReceived=datetime.datetime.now(datetime.timezone.utc).isoformat(),
+ severity=SplunkProvider.SEVERITIES_MAP.get("1"),
+ status="firing",
+ **event
+ )
+
+ search_id = event["sid"]
+ service = connect(
+ token=provider_instance.authentication_config.api_key,
+ host=provider_instance.authentication_config.host,
+ port=provider_instance.authentication_config.port,
+ )
+ saved_search = service.saved_searches[search_id]
+ return AlertDto(
+ id=event["sid"],
+ name=event["search_name"],
+ source=["splunk"],
+ url=event["results_link"],
+ severity=SplunkProvider.SEVERITIES_MAP.get(
+ saved_search["_state"]["content"]["alert.severity"]
+ ),
+ lastReceived=datetime.datetime.now(datetime.timezone.utc).isoformat(),
+ description=saved_search["_state"]["content"]["description"],
+ status="firing",
+ **event
+ )
+
+
+if __name__ == "__main__":
+ # Output debug messages
+ import logging
+
+ logging.basicConfig(level=logging.DEBUG, handlers=[logging.StreamHandler()])
+ context_manager = ContextManager(
+ tenant_id="singletenant",
+ workflow_id="test",
+ )
+ # Load environment variables
+ import os
+
+ api_key = os.environ.get("SPLUNK_API_KEY")
+
+ provider_config = {
+ "authentication": {"api_key": api_key},
+ }
+ provider = ProvidersFactory.get_provider(
+ context_manager=context_manager,
+ provider_id="keep-pd",
+ provider_type="splunk",
+ provider_config=provider_config,
+ )
+ results = provider.setup_webhook(
+ "keep",
+ "https://eb8a-77-137-44-66.ngrok-free.app/alerts/event/splunk?provider_id=keep-pd",
+ "just-a-test",
+ True,
+ )
diff --git a/keep/providers/squadcast_provider/squadcast_provider.py b/keep/providers/squadcast_provider/squadcast_provider.py
index 7b4ee7dad..49ed495a9 100644
--- a/keep/providers/squadcast_provider/squadcast_provider.py
+++ b/keep/providers/squadcast_provider/squadcast_provider.py
@@ -21,7 +21,7 @@ class SquadcastProviderAuthConfig:
"required": True,
"description": "Service region: EU/US",
"hint": "https://apidocs.squadcast.com/#intro",
- "sensitive": False
+ "sensitive": False,
}
)
refresh_token: str | None = dataclasses.field(
@@ -31,7 +31,7 @@ class SquadcastProviderAuthConfig:
"hint": "https://support.squadcast.com/docs/squadcast-public-api",
"sensitive": True,
},
- default=None
+ default=None,
)
webhook_url: str | None = dataclasses.field(
metadata={
@@ -40,7 +40,7 @@ class SquadcastProviderAuthConfig:
"hint": "https://support.squadcast.com/integrations/incident-webhook-incident-webhook-api",
"sensitive": True,
},
- default=None
+ default=None,
)
@@ -59,7 +59,7 @@ class SquadcastProvider(BaseProvider):
]
def __init__(
- self, context_manager: ContextManager, provider_id: str, config: ProviderConfig
+ self, context_manager: ContextManager, provider_id: str, config: ProviderConfig
):
super().__init__(context_manager, provider_id, config)
@@ -69,9 +69,11 @@ def validate_scopes(self):
"""
refresh_headers = {
"content-type": "application/json",
- "X-Refresh-Token": f"{self.authentication_config.refresh_token}"
+ "X-Refresh-Token": f"{self.authentication_config.refresh_token}",
}
- resp = requests.get(f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers)
+ resp = requests.get(
+ f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers
+ )
try:
resp.raise_for_status()
scopes = {
@@ -85,80 +87,126 @@ def validate_scopes(self):
return scopes
def __get_endpoint(self, endpoint: str):
- if endpoint == 'auth':
- return ('https://auth.eu.squadcast.com', 'https://auth.squadcast.com')[
- self.authentication_config.service_region == 'US']
- elif endpoint == 'api':
- return ('https://api.eu.squadcast.com', 'https://api.squadcast.com')[
- self.authentication_config.service_region == 'US']
+ if endpoint == "auth":
+ return ("https://auth.eu.squadcast.com", "https://auth.squadcast.com")[
+ self.authentication_config.service_region == "US"
+ ]
+ elif endpoint == "api":
+ return ("https://api.eu.squadcast.com", "https://api.squadcast.com")[
+ self.authentication_config.service_region == "US"
+ ]
def validate_config(self):
self.authentication_config = SquadcastProviderAuthConfig(
**self.config.authentication
)
if (
- not self.authentication_config.refresh_token
- and not self.authentication_config.webhook_url
+ not self.authentication_config.refresh_token
+ and not self.authentication_config.webhook_url
):
raise ProviderConfigException(
"SquadcastProvider requires either refresh_token or webhook_url",
provider_id=self.provider_id,
)
- def _create_incidents(self, headers: dict, message: str, description: str, priority: str = "",
- status: str = "",
- event_id: str = ""):
-
- body = json.dumps({
- "message": message,
- "description": description,
- "priority": priority,
- "status": status,
- "event_id": event_id
- })
-
- return requests.post(self.authentication_config.webhook_url, data=body, headers=headers)
-
- def _crete_notes(self, headers: dict, message: str, incident_id: str, attachments: list = []):
- body = json.dumps({
- "message": message,
- "attachments": attachments
- })
- return requests.post(f"{self.__get_endpoint('api')}/v3/incidents/{incident_id}/warroom", data=body,
- headers=headers)
-
- def _notify(self, notify_type: str, message: str = "", description: str = "", incident_id: str = "",
- priority: str = "",
- status: str = "",
- event_id: str = "", attachments: list = [], **kwargs) -> dict:
+ def _create_incidents(
+ self,
+ headers: dict,
+ message: str,
+ description: str,
+ priority: str = "",
+ status: str = "",
+ event_id: str = "",
+ additional_json: str = "",
+ ):
+ body = json.dumps(
+ {
+ "message": message,
+ "description": description,
+ "priority": priority,
+ "status": status,
+ "event_id": event_id,
+ }
+ )
+
+    # merge the core body over additional_json (body keys win) so additional_json cannot override the core body fields
+ body = json.dumps({**json.loads(additional_json), **json.loads(body)})
+
+ return requests.post(
+ self.authentication_config.webhook_url, data=body, headers=headers
+ )
+
+ def _crete_notes(
+ self, headers: dict, message: str, incident_id: str, attachments: list = []
+ ):
+ body = json.dumps({"message": message, "attachments": attachments})
+ return requests.post(
+ f"{self.__get_endpoint('api')}/v3/incidents/{incident_id}/warroom",
+ data=body,
+ headers=headers,
+ )
+
+ def _notify(
+ self,
+ notify_type: str,
+ message: str = "",
+ description: str = "",
+ incident_id: str = "",
+ priority: str = "",
+ status: str = "",
+ event_id: str = "",
+ attachments: list = [],
+ additional_json: str = "",
+ **kwargs,
+ ) -> dict:
"""
Create an incident or notes using the Squadcast API.
"""
+
self.logger.info(
f"Creating {notify_type} using SquadcastProvider",
- extra={
- notify_type: notify_type
- })
+ extra={notify_type: notify_type},
+ )
refresh_headers = {
"content-type": "application/json",
- "X-Refresh-Token": f"{self.authentication_config.refresh_token}"
+ "X-Refresh-Token": f"{self.authentication_config.refresh_token}",
}
- api_key_resp = requests.get(f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers)
+ api_key_resp = requests.get(
+ f"{self.__get_endpoint('auth')}/oauth/access-token", headers=refresh_headers
+ )
headers = {
"content-type": "application/json",
"Authorization": f"Bearer {api_key_resp.json()['data']['access_token']}",
}
- if notify_type == 'incident':
+ if notify_type == "incident":
if message == "" or description == "":
- raise Exception(f"message: \"{message}\" and description: \"{description}\" cannot be empty")
- resp = self._create_incidents(headers=headers, message=message, description=description, priority=priority,
- status=status, event_id=event_id)
- elif notify_type == 'notes':
+ raise Exception(
+ f'message: "{message}" and description: "{description}" cannot be empty'
+ )
+ resp = self._create_incidents(
+ headers=headers,
+ message=message,
+ description=description,
+ priority=priority,
+ status=status,
+ event_id=event_id,
+ additional_json=additional_json,
+ )
+ elif notify_type == "notes":
if message == "" or incident_id == "":
- raise Exception(f"message: \"{message}\" and incident_id: \"{incident_id}\" cannot be empty")
- resp = self._crete_notes(headers=headers, message=message, incident_id=incident_id, attachments=attachments)
+ raise Exception(
+ f'message: "{message}" and incident_id: "{incident_id}" cannot be empty'
+ )
+ resp = self._crete_notes(
+ headers=headers,
+ message=message,
+ incident_id=incident_id,
+ attachments=attachments,
+ )
else:
- raise Exception("notify_type is a mandatory field, expected: incident | notes")
+ raise Exception(
+ "notify_type is a mandatory field, expected: incident | notes"
+ )
try:
resp.raise_for_status()
return resp.json()
@@ -175,7 +223,7 @@ def dispose(self):
if __name__ == "__main__":
import os
- squadcast_api_key = os.environ.get("MAILCHIMP_API_KEY")
+ squadcast_api_key = os.environ.get("SQUADCAST_API_KEY")
context_manager = ContextManager(
tenant_id="singletenant",
workflow_id="test",
@@ -184,11 +232,10 @@ def dispose(self):
config = ProviderConfig(
authentication={"api_key": squadcast_api_key},
)
- provider = SquadcastProvider(context_manager, provider_id="squadcast-test", config=config)
+ provider = SquadcastProvider(
+ context_manager, provider_id="squadcast-test", config=config
+ )
response = provider.notify(
- "onboarding@squadcast.dev",
- "youremail@gmail.com",
- "Hello World from Keep!",
- "Test with HTML",
+ description="test",
)
print(response)
diff --git a/keep/providers/ssh_provider/ssh_provider.py b/keep/providers/ssh_provider/ssh_provider.py
index 22f77e831..078815347 100644
--- a/keep/providers/ssh_provider/ssh_provider.py
+++ b/keep/providers/ssh_provider/ssh_provider.py
@@ -1,6 +1,7 @@
"""
SshProvider is a class that provides a way to execute SSH commands and get the output.
"""
+
import dataclasses
import io
@@ -38,6 +39,9 @@ class SshProviderAuthConfig:
"required": False,
"description": "SSH private key",
"sensitive": True,
+ "type": "file",
+ "name": "pkey",
+ "file_type": "*",
},
)
password: str = dataclasses.field(
@@ -92,7 +96,7 @@ def __generate_client(self) -> SSHClient:
key = RSAKey.from_private_key(
private_key_file, self.config.authentication.get("pkey_passphrase")
)
- ssh_client.connect(host, port, user, pk=key)
+ ssh_client.connect(host, port, user, pkey=key)
else:
# Connect using password
ssh_client.connect(
@@ -147,20 +151,19 @@ def _query(self, command: str, **kwargs: dict):
# Load environment variables
import os
- user = os.environ.get("SSH_USERNAME")
+ user = os.environ.get("SSH_USERNAME") or "root"
password = os.environ.get("SSH_PASSWORD")
- host = os.environ.get("SSH_HOST")
-
+ host = os.environ.get("SSH_HOST") or "1.1.1.1"
+ pkey = os.environ.get("SSH_PRIVATE_KEY")
config = {
- "id": "ssh-demo",
"authentication": {
"user": user,
- "password": password,
+ "pkey": pkey,
"host": host,
},
}
provider = ProvidersFactory.get_provider(
context_manager, provider_id="ssh", provider_type="ssh", provider_config=config
)
- result = provider.query("df -h")
+ result = provider.query(command="df -h")
print(result)
diff --git a/keep/providers/websocket_provider/websocket_provider.py b/keep/providers/websocket_provider/websocket_provider.py
index 80b857c72..df78a338c 100644
--- a/keep/providers/websocket_provider/websocket_provider.py
+++ b/keep/providers/websocket_provider/websocket_provider.py
@@ -65,7 +65,7 @@ def dispose(self):
try:
self.ws.close()
except Exception:
- self.logger.warn("Failed to close websocket connection")
+ self.logger.warning("Failed to close websocket connection")
if __name__ == "__main__":
diff --git a/keep/providers/zabbix_provider/zabbix_provider.py b/keep/providers/zabbix_provider/zabbix_provider.py
index 2a1513cc8..d1091681f 100644
--- a/keep/providers/zabbix_provider/zabbix_provider.py
+++ b/keep/providers/zabbix_provider/zabbix_provider.py
@@ -1,12 +1,13 @@
"""
Zabbix Provider is a class that allows to ingest/digest data from Zabbix.
"""
+
import dataclasses
import datetime
import json
import os
import random
-from typing import Literal
+from typing import Literal, Optional
import pydantic
import requests
@@ -554,7 +555,9 @@ def setup_webhook(
self.logger.info("Finished installing webhook")
@staticmethod
- def _format_alert(event: dict) -> AlertDto:
+ def _format_alert(
+ event: dict, provider_instance: Optional["ZabbixProvider"]
+ ) -> AlertDto:
environment = "unknown"
tags = {
tag.get("tag"): tag.get("value")
diff --git a/keep/rulesengine/rulesengine.py b/keep/rulesengine/rulesengine.py
index 5a75c4772..1105dcc35 100644
--- a/keep/rulesengine/rulesengine.py
+++ b/keep/rulesengine/rulesengine.py
@@ -129,7 +129,8 @@ def run_rules(self, events: list[AlertDto]):
group_status = self._calc_group_status(group.alerts)
# get the payload of the group
- group_payload = self._generate_group_payload(group.alerts)
+ # todo: this is not scaling, needs to find another solution
+ # group_payload = self._generate_group_payload(group.alerts)
# create the alert
group_alert = create_alert_db(
tenant_id=self.tenant_id,
@@ -146,7 +147,7 @@ def run_rules(self, events: list[AlertDto]):
"status": group_status,
"pushed": True,
"group": True,
- "groupPayload": group_payload,
+ # "groupPayload": group_payload,
"fingerprint": group_fingerprint,
**group_attributes,
},
@@ -234,6 +235,12 @@ def _calc_group_fingerprint(self, event: AlertDto, rule):
f"Failed to calculate group fingerprint for event {event.id} and rule {rule.name}"
)
return "none"
+ # if any of the values is None, we will return "none"
+ if any([fingerprint is None for fingerprint in group_fingerprint]):
+ self.logger.warning(
+ f"Failed to fetch the appropriate labels from the event {event.id} and rule {rule.name}"
+ )
+ return "none"
return ",".join(group_fingerprint)
def _calc_group_status(self, alerts):
@@ -281,6 +288,7 @@ def _generate_group_payload(self, alerts):
Returns:
dict: the payload of the group alert
"""
+
# first, group by fingerprints
alerts_by_fingerprint = {}
for alert in alerts:
diff --git a/keep/workflowmanager/workflow.py b/keep/workflowmanager/workflow.py
index 78850ff31..c80ef0a2b 100644
--- a/keep/workflowmanager/workflow.py
+++ b/keep/workflowmanager/workflow.py
@@ -63,7 +63,7 @@ def run_action(self, action: Step):
except Exception as e:
self.logger.error(f"Action {action.name} failed: {e}")
action_status = False
- action_error = str(e)
+ action_error = f"Failed to run action {action.name}: {str(e)}"
return action_status, action_error
def run_actions(self):
@@ -72,8 +72,9 @@ def run_actions(self):
actions_errors = []
for action in self.workflow_actions:
action_status, action_error = self.run_action(action)
- actions_firing.append(action_status)
- actions_errors.append(action_error)
+ if action_error:
+ actions_firing.append(action_status)
+ actions_errors.append(action_error)
self.logger.debug("Actions run")
return actions_firing, actions_errors
diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py
index 96f004a1f..e3bce5485 100644
--- a/keep/workflowmanager/workflowmanager.py
+++ b/keep/workflowmanager/workflowmanager.py
@@ -47,7 +47,7 @@ def stop(self):
self.started = False
def _apply_filter(self, filter_val, value):
- # if its a regex, apply it
+ # if it's a regex, apply it
if filter_val.startswith('r"'):
try:
# remove the r" and the last "
@@ -75,7 +75,7 @@ def insert_events(self, tenant_id, events: typing.List[AlertDto]):
# todo - handle it better
# todo2 - handle if more than one provider is not configured
except ProviderConfigurationException as e:
- self.logger.warn(
+ self.logger.warning(
f"Workflow have a provider that is not configured: {e}"
)
continue
@@ -174,15 +174,18 @@ def insert_events(self, tenant_id, events: typing.List[AlertDto]):
if not should_run:
continue
# Lastly, if the workflow should run, add it to the scheduler
- self.scheduler.workflows_to_run.append(
- {
- "workflow": workflow,
- "workflow_id": workflow_model.id,
- "tenant_id": tenant_id,
- "triggered_by": "alert",
- "event": event,
- }
- )
+ self.logger.info("Adding workflow to run")
+ with self.scheduler.lock:
+ self.scheduler.workflows_to_run.append(
+ {
+ "workflow": workflow,
+ "workflow_id": workflow_model.id,
+ "tenant_id": tenant_id,
+ "triggered_by": "alert",
+ "event": event,
+ }
+ )
+ self.logger.info("Workflow added to run")
def _get_event_value(self, event, filter_key):
# if the filter key is a nested key, get the value
@@ -216,7 +219,7 @@ def run(self, workflows: list[Workflow]):
# If at least one workflow has an interval, run workflows using the scheduler,
# otherwise, just run it
if any([Workflow.workflow_interval for Workflow in workflows]):
- # running workglows in scheduler mode
+ # running workflows in scheduler mode
self.logger.info(
"Found at least one workflow with an interval, running in scheduler mode"
)
@@ -268,7 +271,7 @@ def _run_workflow(self, workflow: Workflow, workflow_execution_id: str):
self.logger.info(
f"Running on_failure action for workflow {workflow.workflow_id}"
)
- # Adding the exception message to the provider context so it'll be available for the action
+ # Adding the exception message to the provider context, so it'll be available for the action
message = (
f"Workflow {workflow.workflow_id} failed with exception: {str(e)}å"
)
diff --git a/keep/workflowmanager/workflowscheduler.py b/keep/workflowmanager/workflowscheduler.py
index 1ee734755..04c6ee96b 100644
--- a/keep/workflowmanager/workflowscheduler.py
+++ b/keep/workflowmanager/workflowscheduler.py
@@ -1,11 +1,11 @@
import enum
import hashlib
-import json
import logging
import threading
import time
import typing
import uuid
+from threading import Lock
from sqlalchemy.exc import IntegrityError
@@ -15,6 +15,7 @@
from keep.api.core.db import get_previous_execution_id
from keep.api.core.db import get_workflow as get_workflow_db
from keep.api.core.db import get_workflows_that_should_run
+from keep.api.models.alert import AlertDto
from keep.api.utils.email_utils import EmailTemplates, send_email
from keep.providers.providers_factory import ProviderConfigurationException
from keep.workflowmanager.workflow import Workflow
@@ -38,6 +39,7 @@ def __init__(self, workflow_manager):
# all workflows that needs to be run due to alert event
self.workflows_to_run = []
self._stop = False
+ self.lock = Lock()
async def start(self):
self.logger.info("Starting workflows scheduler")
@@ -121,7 +123,7 @@ def _run_workflow(
workflow_id=workflow_id,
workflow_execution_id=workflow_execution_id,
status=WorkflowStatus.ERROR,
- error=",".join(str(e) for e in errors),
+ error="\n".join(str(e) for e in errors),
)
else:
self._finish_workflow_execution(
@@ -134,60 +136,53 @@ def _run_workflow(
self.logger.info(f"Workflow {workflow.workflow_id} ran")
def handle_manual_event_workflow(
- self, workflow_id, tenant_id, triggered_by_user, event
+ self, workflow_id, tenant_id, triggered_by_user, alert: AlertDto
):
+ self.logger.info(f"Running manual event workflow {workflow_id}...")
try:
- # if the event is not defined, add some entropy
- if not event:
- event = {
- "workflow_id": workflow_id,
- "triggered_by_user": triggered_by_user,
- "trigger": "manual",
- "time": time.time(),
- }
- else:
- # so unique_execution_number will be different
- event["time"] = time.time()
- unique_execution_number = self._get_unique_execution_number(
- json.dumps(event).encode()
- )
+ unique_execution_number = self._get_unique_execution_number()
+ self.logger.info(f"Unique execution number: {unique_execution_number}")
workflow_execution_id = create_workflow_execution(
workflow_id=workflow_id,
tenant_id=tenant_id,
triggered_by=f"manually by {triggered_by_user}",
execution_number=unique_execution_number,
- fingerprint=event.get("fingerprint"),
+ fingerprint=alert.fingerprint,
)
+ self.logger.info(f"Workflow execution id: {workflow_execution_id}")
# This is kinda WTF exception since create_workflow_execution shouldn't fail for manual
except Exception as e:
self.logger.error(f"WTF: error creating workflow execution: {e}")
raise e
- self.workflows_to_run.append(
- {
+ self.logger.info(
+ "Adding workflow to run",
+ extra={
"workflow_id": workflow_id,
"workflow_execution_id": workflow_execution_id,
"tenant_id": tenant_id,
"triggered_by": "manual",
"triggered_by_user": triggered_by_user,
- "event": event,
- }
+ },
)
+ with self.lock:
+ self.workflows_to_run.append(
+ {
+ "workflow_id": workflow_id,
+ "workflow_execution_id": workflow_execution_id,
+ "tenant_id": tenant_id,
+ "triggered_by": "manual",
+ "triggered_by_user": triggered_by_user,
+ "event": alert,
+ }
+ )
return workflow_execution_id
- def _get_unique_execution_number(self, payload: bytes):
- """Gets a unique execution number for a workflow execution
- # TODO: this is a hack. the execution number is a way to enforce that
- # the interval mechanism will work. we need to find a better way to do it
- # the "correct way" should be to seperate the interval mechanism from the event/manual mechanishm
-
- Args:
- workflow_id (str): the id of the workflow
- tenant_id (str): the id ot the tenant
- payload (bytes): some encoded binary payload
-
+ def _get_unique_execution_number(self):
+ """
Returns:
int: an int represents unique execution number
"""
+ payload = str(uuid.uuid4()).encode()
return int(hashlib.sha256(payload).hexdigest(), 16) % (
WorkflowScheduler.MAX_SIZE_SIGNED_INT + 1
)
@@ -196,9 +191,19 @@ def _handle_event_workflows(self):
# TODO - event workflows should be in DB too, to avoid any state problems.
# take out all items from the workflows to run and run them, also, clean the self.workflows_to_run list
- workflows_to_run, self.workflows_to_run = self.workflows_to_run, []
+ with self.lock:
+ workflows_to_run, self.workflows_to_run = self.workflows_to_run, []
for workflow_to_run in workflows_to_run:
- self.logger.info("Running event workflow on background")
+ self.logger.info(
+ "Running event workflow on background",
+ extra={
+ "workflow_id": workflow_to_run.get("workflow_id"),
+ "workflow_execution_id": workflow_to_run.get(
+ "workflow_execution_id"
+ ),
+ "tenant_id": workflow_to_run.get("tenant_id"),
+ },
+ )
workflow = workflow_to_run.get("workflow")
workflow_id = workflow_to_run.get("workflow_id")
tenant_id = workflow_to_run.get("tenant_id")
@@ -244,9 +249,7 @@ def _handle_event_workflows(self):
# TODO: one more robust way to do it
if not workflow_execution_id:
try:
- workflow_execution_number = self._get_unique_execution_number(
- event.json().encode()
- )
+ workflow_execution_number = self._get_unique_execution_number()
workflow_execution_id = create_workflow_execution(
workflow_id=workflow_id,
tenant_id=tenant_id,
diff --git a/poetry.lock b/poetry.lock
index 78f35139d..815b529c2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -428,34 +428,34 @@ typecheck = ["mypy"]
[[package]]
name = "black"
-version = "23.12.1"
+version = "24.3.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"},
- {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"},
- {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"},
- {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"},
- {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"},
- {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"},
- {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"},
- {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"},
- {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
- {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
- {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
- {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
- {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"},
- {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"},
- {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"},
- {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"},
- {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"},
- {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"},
- {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"},
- {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"},
- {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
- {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
+ {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
+ {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
+ {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
+ {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
+ {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
+ {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
+ {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
+ {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
+ {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
+ {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
+ {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
+ {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
+ {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
+ {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
+ {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
+ {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
+ {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
+ {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
+ {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
+ {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
+ {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
+ {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
]
[package.dependencies]
@@ -1902,18 +1902,19 @@ files = [
[[package]]
name = "jwcrypto"
-version = "1.5.4"
+version = "1.5.6"
description = "Implementation of JOSE Web standards"
category = "main"
optional = false
python-versions = ">= 3.8"
files = [
- {file = "jwcrypto-1.5.4.tar.gz", hash = "sha256:0815fbab613db99bad85691da5f136f8860423396667728a264bcfa6e1db36b0"},
+ {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"},
+ {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"},
]
[package.dependencies]
cryptography = ">=3.4"
-typing_extensions = ">=4.5.0"
+typing-extensions = ">=4.5.0"
[[package]]
name = "kafka-python"
@@ -4138,6 +4139,16 @@ files = [
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
]
+[[package]]
+name = "splunk-sdk"
+version = "1.7.4"
+description = "The Splunk Software Development Kit for Python."
+optional = false
+python-versions = "*"
+files = [
+ {file = "splunk-sdk-1.7.4.tar.gz", hash = "sha256:8f3f149e3a0daf7526ed36882c109e4ec8080e417efe25d23f4578e86d38b9f2"},
+]
+
[[package]]
name = "sqlalchemy"
version = "1.4.41"
@@ -4213,6 +4224,34 @@ postgresql-psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3-binary"]
+[[package]]
+name = "sqlalchemy-utils"
+version = "0.41.1"
+description = "Various utility functions for SQLAlchemy."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"},
+ {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"},
+]
+
+[package.dependencies]
+SQLAlchemy = ">=1.3"
+
+[package.extras]
+arrow = ["arrow (>=0.3.4)"]
+babel = ["Babel (>=1.3)"]
+color = ["colour (>=0.0.4)"]
+encrypted = ["cryptography (>=0.6)"]
+intervals = ["intervals (>=0.7.1)"]
+password = ["passlib (>=1.6,<2.0)"]
+pendulum = ["pendulum (>=2.0.5)"]
+phone = ["phonenumbers (>=5.9.2)"]
+test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
+test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
+timezone = ["python-dateutil"]
+url = ["furl (>=0.4.1)"]
+
[[package]]
name = "sqlalchemy2-stubs"
version = "0.0.2a38"
@@ -4707,4 +4746,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = ">=3.11,<3.12"
-content-hash = "e1db74a81fed28ff1d7ee7084d1e69fc5e2a043a6648072ae7b1d823e3725436"
+content-hash = "9b045439cccca76643fe5bc1e39067b1b1eecd0072b0d0bec0df312f3e748f48"
diff --git a/pyproject.toml b/pyproject.toml
index 8d22cbf01..198b4b5e3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,7 +39,7 @@ cloud-sql-python-connector = "^1.2.3"
pymysql = "^1.0.3"
google-cloud-secret-manager = "^2.16.1"
python-jose = "^3.3.0"
-jwcrypto = "^1.5.1"
+jwcrypto = "^1.5.6"
sqlalchemy = "1.4.41"
snowflake-connector-python = "3.1.0"
openai = "^0.27.7"
@@ -78,13 +78,15 @@ google-cloud-trace = "1.11.3"
hvac = "^2.1.0"
mailchimp-transactional = "^1.0.56"
pyodbc = "^5.1.0"
+sqlalchemy-utils = "^0.41.1"
+splunk-sdk = "^1.7.4"
[tool.poetry.group.dev.dependencies]
pre-commit = "^3.0.4"
pre-commit-hooks = "^4.4.0"
yamllint = "^1.29.0"
-black = "^23.1.0"
+black = "^24.3.0"
isort = "^5.12.0"
autopep8 = "^2.0.1"
flake8 = "^6.0.0"
diff --git a/tests/conftest.py b/tests/conftest.py
index 6562b61a7..3218779dd 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,6 +1,6 @@
import os
import random
-from unittest.mock import patch
+from unittest.mock import Mock, patch
import mysql.connector
import pytest
@@ -185,3 +185,24 @@ def db_session(request, mysql_container):
SQLModel.metadata.drop_all(mock_engine)
# Clean up after the test
session.close()
+
+
+@pytest.fixture
+def mocked_context_manager():
+ context_manager = Mock(spec=ContextManager)
+ # Simulate contexts as needed for each test case
+ context_manager.steps_context = {}
+ context_manager.providers_context = {}
+ context_manager.event_context = {}
+ context_manager.click_context = {}
+ context_manager.foreach_context = {"value": None}
+ context_manager.dependencies = set()
+ context_manager.get_full_context.return_value = {
+ "steps": {},
+ "providers": {},
+ "event": {},
+ "alert": {},
+ "foreach": {"value": None},
+ "env": {},
+ }
+ return context_manager
diff --git a/tests/test_alert_deduplicator.py b/tests/test_alert_deduplicator.py
index b2334d723..a83b988f9 100644
--- a/tests/test_alert_deduplicator.py
+++ b/tests/test_alert_deduplicator.py
@@ -33,7 +33,7 @@ def test_deduplication_sanity(db_session):
provider_type="test",
provider_id="test",
event=alert.dict(),
- fingerprint="test",
+ fingerprint=alert.fingerprint,
alert_hash=alert_hash,
)
)
@@ -76,7 +76,7 @@ def test_deduplication_with_matcher(db_session):
provider_type="test",
provider_id="test",
event=alert.dict(),
- fingerprint="test",
+ fingerprint=alert.fingerprint,
alert_hash=alert_hash,
)
)
@@ -128,7 +128,7 @@ def test_deduplication_with_unrelated_filter(db_session):
provider_type="test",
provider_id="test",
event=alert.dict(),
- fingerprint="test",
+ fingerprint=alert.fingerprint,
alert_hash=alert_hash,
)
)
diff --git a/tests/test_functions.py b/tests/test_functions.py
index 958b11335..44a113c33 100644
--- a/tests/test_functions.py
+++ b/tests/test_functions.py
@@ -1,4 +1,5 @@
import datetime
+import json
import pytest
import pytz
@@ -69,6 +70,20 @@ def test_keep_split_function():
assert functions.split("a|b|c", "|") == ["a", "b", "c"]
+def test_keep_uppercase_function():
+ """
+ Test the uppercase function
+ """
+ assert functions.uppercase("a") == "A"
+
+
+def test_keep_lowercase_function():
+ """
+ Test the lowercase function
+ """
+ assert functions.lowercase("A") == "a"
+
+
def test_keep_strip_function():
"""
Test the strip function
@@ -83,6 +98,13 @@ def test_keep_first_function():
assert functions.first([1, 2, 3]) == 1
+def test_keep_last_function():
+ """
+ Test the last function
+ """
+ assert functions.last([1, 2, 3]) == 3
+
+
def test_keep_utcnow_function():
"""
Test the utcnow function
@@ -120,3 +142,86 @@ def test_keep_encode_function():
Test the encode function
"""
assert functions.encode("a b") == "a%20b"
+
+
+def test_len():
+ assert functions.len([1, 2, 3]) == 3
+ assert functions.len([]) == 0
+
+
+def test_all():
+ assert functions.all([True, True, True]) is True
+ assert functions.all([True, False, True]) is False
+
+
+def test_diff():
+ assert functions.diff([1, 1, 1]) is False
+ assert functions.diff([1, 2, 1]) is True
+
+
+def test_uppercase():
+ assert functions.uppercase("test") == "TEST"
+
+
+def test_lowercase():
+ assert functions.lowercase("TEST") == "test"
+
+
+def test_split():
+ assert functions.split("a,b,c", ",") == ["a", "b", "c"]
+
+
+def test_strip():
+ assert functions.strip(" test ") == "test"
+
+
+def test_first():
+ assert functions.first([1, 2, 3]) == 1
+
+
+def test_last():
+ assert functions.last([1, 2, 3]) == 3
+
+
+def test_utcnow():
+ now = datetime.datetime.now(datetime.timezone.utc)
+ func_now = functions.utcnow()
+ # Assuming this test runs quickly, the two times should be within a few seconds of each other
+ assert (func_now - now).total_seconds() < 5
+
+
+def test_utcnowiso():
+ assert isinstance(functions.utcnowiso(), str)
+
+
+def test_substract_minutes():
+ now = datetime.datetime.now(datetime.timezone.utc)
+ earlier = functions.substract_minutes(now, 10)
+ assert (now - earlier).total_seconds() == 600 # 10 minutes
+
+
+def test_to_utc():
+ local_dt = datetime.datetime.now()
+ utc_dt = functions.to_utc(local_dt)
+ # Compare the timezone names instead of the timezone objects
+ assert utc_dt.tzinfo.tzname(utc_dt) == datetime.timezone.utc.tzname(None)
+
+
+def test_datetime_compare():
+ dt1 = datetime.datetime.now(datetime.timezone.utc)
+ dt2 = functions.substract_minutes(dt1, 60) # 1 hour earlier
+ assert functions.datetime_compare(dt1, dt2) == 1
+
+
+def test_json_dumps():
+ data = {"key": "value"}
+ expected = json.dumps(data, indent=4, default=str)
+ assert functions.json_dumps(data) == expected
+
+
+def test_encode():
+ assert functions.encode("test value") == "test%20value"
+
+
+def test_dict_to_key_value_list():
+ assert functions.dict_to_key_value_list({"a": 1, "b": "test"}) == ["a:1", "b:test"]
diff --git a/tests/test_iohandler.py b/tests/test_iohandler.py
index f70432b98..83012115b 100644
--- a/tests/test_iohandler.py
+++ b/tests/test_iohandler.py
@@ -1,8 +1,11 @@
"""
Test the io handler
"""
+import datetime
+import pytest
+from keep.api.models.alert import AlertDto
from keep.iohandler.iohandler import IOHandler
@@ -72,3 +75,617 @@ def test_with_json_dumps_when_json_string(context_manager):
}
s = iohandler.render("hello keep.json_dumps({{ steps.some_list }})")
assert s == "hello [\n 1,\n 2,\n 3\n]"
+
+
+def test_alert_with_odd_number_of_parentheses(context_manager):
+ """Tests complex alert with odd number of parentheses
+
+ "unterminated string literal" error is raised when the alert message contains an odd number of parentheses
+ """
+
+    # this is an example of a Sentry alert containing
+    # " }, [data, data?.testDetails, onSelect]);",
+    # which previously broke our iohandler
+ #
+ e = {
+ "type": "Error",
+ "value": "Object captured as exception with keys: test, test2, test3, test4, test5",
+ "mechanism": {
+ "type": "generic",
+ "handled": True,
+ "synthetic": True,
+ },
+ "stacktrace": {
+ "frames": [
+ {
+ "colno": 1,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ },
+ {
+ "colno": 2,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ "function": "r8",
+ },
+ {
+ "colno": 3,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ "function": "oP",
+ },
+ {
+ "colno": 4,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ "function": "oU",
+ },
+ {
+ "colno": 5,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ },
+ {
+ "colno": 6,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ "function": "oV",
+ },
+ {
+ "colno": 7,
+ "in_app": False,
+ "lineno": 9,
+ "module": "chunks/framework",
+ "abs_path": "app:///_next/static/chunks/framework-test.js",
+ "filename": "app:///_next/static/chunks/framework-test.js",
+ "function": "uU",
+ },
+ {
+ "data": {
+ "sourcemap": "app:///_next/static/chunks/pages/_app-test.js.map",
+ "symbolicated": True,
+ "resolved_with": "index",
+ },
+ "colno": 14,
+ "in_app": True,
+ "lineno": 43,
+ "module": "chunks/pages/modules/shared/components/test-test-test/test-test-test",
+ "abs_path": "app:///_next/static/chunks/pages/modules/shared/components/test-test-test/test-test-test.tsx",
+ "filename": "./modules/shared/components/test-test-test/test-test-test.tsx",
+ "function": "",
+ "pre_context": [
+ " onSelect(data.testDetails);",
+ " }",
+ " }, [data, data?.testDetails, onSelect]);",
+ "",
+ " useEffect(() => {",
+ ],
+ "context_line": " error && captureException(error, { extra: { test } });",
+ "post_context": [
+ " }, [error, testId]);",
+ "",
+ " const onSelectHandler = (externalReference: string) => {",
+ " setTestId(externalReference);",
+ " };",
+ ],
+ },
+ {
+ "data": {
+ "sourcemap": "app:///_next/static/chunks/pages/_app-test.js.map",
+ "symbolicated": True,
+ "resolved_with": "index",
+ },
+ "colno": 23,
+ "in_app": False,
+ "lineno": 21,
+ "module": "chunks/pages/node_modules/@sentry/core/esm/exports",
+ "abs_path": "app:///_next/static/chunks/pages/node_modules/@sentry/core/esm/exports.js",
+ "filename": "./node_modules/@sentry/core/esm/exports.js",
+ "function": "captureException",
+ "pre_context": [
+ " // eslint-disable-next-line @typescript-eslint/no-explicit-any",
+ " exception,",
+ " hint,",
+ ") {",
+ " // eslint-disable-next-line deprecation/deprecation",
+ ],
+ "context_line": " return test();",
+ "post_context": [
+ "}",
+ "",
+ "/**",
+ " * Captures a message event and sends it to Sentry.",
+ " *",
+ ],
+ },
+ ]
+ },
+ "raw_stacktrace": {
+ "frames": [
+ {
+ "colno": 1,
+ "in_app": True,
+ "lineno": 9,
+ "abs_path": "app:///_next/static/chunks/test-test.js",
+ "filename": "app:///_next/static/chunks/test-test.js",
+ },
+ {
+ "colno": 2,
+ "in_app": True,
+ "lineno": 8,
+ "abs_path": "app:///_next/static/chunks/pages/_app-test.js",
+ "filename": "app:///_next/static/chunks/pages/_app-test.js",
+ "pre_context": [
+ " *",
+ " * Copyright (c) Facebook, Inc. and its affiliates.",
+ " *",
+ " * This source code is licensed under the MIT license found in the",
+ " * LICENSE file in the root directory of this source tree.",
+ ],
+ "context_line": "{snip} ?void 0:f.testDetails,u]),(0,s.useEffect)(()=>{h&&(0,D.Tb)(h,{extra:{test:l}})},[h,l]);let m=e=>{d(e)},g=(0,s.useMemo)(()=>t&&0===n.leng {snip}",
+ "post_context": [
+ "Sentry.addTracingExtensions();",
+ "Sentry.init({...});",
+ '{snip} y{return"SentryError"===e.exception.values[0].type}catch(e){}return!1}(t)?(x.X&&k.kg.warn(`Event dropped due to being internal Sentry Error.',
+ "{snip} ge for event ${(0,P.jH)(e)}`),n})(t).some(e=>(0,B.U0)(e,o)))?(x.X&&k.kg.warn(`Event dropped due to being matched by \\`ignoreErrors\\` option.",
+ "{snip} eturn!0;let n=$(e);return!n||(0,B.U0)(n,t)}(t,i.allowUrls)||(x.X&&k.kg.warn(`Event dropped due to not being matched by \\`allowUrls\\` option.",
+ ],
+ },
+ {
+ "colno": 162667,
+ "in_app": True,
+ "lineno": 8,
+ "abs_path": "app:///_next/static/chunks/pages/_app-test.js",
+ "filename": "app:///_next/static/chunks/pages/_app-test.js",
+ "function": "u",
+ "pre_context": [
+ " *",
+ " * Copyright (c) Facebook, Inc. and its affiliates.",
+ " *",
+ " * This source code is licensed under the MIT license found in the",
+ " * LICENSE file in the root directory of this source tree.",
+ ],
+ "context_line": "{snip} 300,letterSpacing1400:t.letterSpacing1400,letterSpacing1500:t.letterSpacing1500,letterSpacing1600:t.letterSpacing1600}},8248:function(e,t,n) {snip}",
+ "post_context": [
+ "Sentry.addTracingExtensions();",
+ "Sentry.init({...});",
+ '{snip} y{return"SentryError"===e.exception.values[0].type}catch(e){}return!1}(t)?(x.X&&k.kg.warn(`Event dropped due to being internal Sentry Error.',
+ "{snip} ge for event ${(0,P.jH)(e)}`),n})(t).some(e=>(0,B.U0)(e,o)))?(x.X&&k.kg.warn(`Event dropped due to being matched by \\`ignoreErrors\\` option.",
+ "{snip} eturn!0;let n=$(e);return!n||(0,B.U0)(n,t)}(t,i.allowUrls)||(x.X&&k.kg.warn(`Event dropped due to not being matched by \\`allowUrls\\` option.",
+ ],
+ },
+ ]
+ },
+ }
+ context_manager.alert = AlertDto(
+ **{
+ "id": "test",
+ "name": "test",
+ "lastReceived": "2024-03-20T00:00:00.000Z",
+ "source": ["sentry"],
+ "environment": "prod",
+ "service": None,
+ "apiKeyRef": None,
+ "message": "Object captured as exception with keys: test, test2, test3, test4, test5",
+ "labels": {},
+ "fingerprint": "testtesttest",
+ "dismissUntil": None,
+ "dismissed": False,
+ "jira_component": "Test",
+ "linearb_service": "Test - UI",
+ "jira_component_full": "Test (UI)",
+ "alert_hash": "testtesttest",
+ "jira_priority": "High",
+ "exceptions": [e, e],
+ "github_repo": "https://github.com/test/test.git",
+ "tags": {
+ "os": "Windows >=10",
+ "url": "https://test.test.keephq.dev/keep",
+ "level": "error",
+ "browser": "Edge 122.0.0",
+ "handled": "yes",
+ "os.name": "Windows",
+ "release": "1234",
+ "runtime": "browser",
+ "mechanism": "generic",
+ "transaction": "/test",
+ "browser.name": "Edge",
+ "service_name": "keep-test",
+ },
+ }
+ )
+ context_manager.event_context = context_manager.alert
+ iohandler = IOHandler(context_manager)
+ s = iohandler.render(
+ "{{#alert.exceptions}}\n*{{ type }}*\n{{ value }}\n\n*Stack Trace*\n{code:json} keep.json_dumps({{{ stacktrace }}}) {code}\n{{/alert.exceptions}}\n{{^alert.exceptions}}\nNo stack trace available\n{{/alert.exceptions}}\n\n*Tags*\n{code:json} keep.json_dumps({{{ alert.tags }}}) {code}\n\nSee: {{ alert.url }}\n",
+ )
+ assert "test, test2, test3, test4, test5" in s
+ assert "aptures a message event and sends it to Sentry" in s
+
+
+def test_functions(mocked_context_manager):
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {"some_list": [["Asd", 2, 3], [4, 5, 6], [7, 8, 9]]},
+ }
+ iohandler = IOHandler(mocked_context_manager)
+ s = iohandler.render("result is keep.first(keep.first({{ steps.some_list }}))")
+ assert s == "result is Asd"
+
+
+def test_render_with_json_dumps_function(mocked_context_manager):
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {"some_object": {"key": "value"}}
+ }
+ iohandler = IOHandler(mocked_context_manager)
+ template = "JSON: keep.json_dumps({{ steps.some_object }})"
+ rendered = iohandler.render(template)
+ assert rendered == 'JSON: {\n "key": "value"\n}'
+
+
+def test_render_uppercase(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "hello keep.uppercase('world')"
+ result = iohandler.render(template)
+ assert result == "hello WORLD"
+
+
+def test_render_datetime_compare(context_manager):
+ now = datetime.datetime.utcnow()
+ one_hour_ago = now - datetime.timedelta(hours=1)
+ context_manager.steps_context = {
+ "now": now.isoformat(),
+ "one_hour_ago": one_hour_ago.isoformat(),
+ }
+ iohandler = IOHandler(context_manager)
+ template = "Difference in hours: keep.datetime_compare(keep.to_utc('{{ steps.now }}'), keep.to_utc('{{ steps.one_hour_ago }}'))"
+ result = iohandler.render(template)
+ assert "Difference in hours: 1.0" in result
+
+
+def test_get_pods_foreach(mocked_context_manager):
+ # Mock pods data as would be returned by the `get-pods` step
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {
+ "get-pods": {
+ "results": [
+ {
+ "metadata": {"name": "pod1", "namespace": "default"},
+ "status": {"phase": "Running"},
+ },
+ {
+ "metadata": {"name": "pod2", "namespace": "kube-system"},
+ "status": {"phase": "Pending"},
+ },
+ ]
+ }
+ }
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ template = "Pod status report:{{#steps.get-pods.results}}\nPod name: {{ metadata.name }} || Namespace: {{ metadata.namespace }} || Status: {{ status.phase }}{{/steps.get-pods.results}}"
+ rendered = iohandler.render(template)
+
+ expected_output = "Pod status report:\nPod name: pod1 || Namespace: default || Status: Running\nPod name: pod2 || Namespace: kube-system || Status: Pending"
+ assert rendered.strip() == expected_output.strip()
+
+
+def test_resend_python_service_condition(mocked_context_manager):
+ # Mock return_code to simulate the success scenario
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {"run-script": {"results": {"return_code": 0}}}
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ condition = "{{ steps.run-script.results.return_code }} == 0"
+ # Simulate condition evaluation
+ assert eval(iohandler.render(condition)) is True
+
+
+def test_blogpost_workflow_enrich_alert(mocked_context_manager):
+ # Mock customer data as would be returned by the `get-more-details` step
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {
+ "get-more-details": {
+ "results": {
+ "name": "John Doe",
+ "email": "john@example.com",
+ "tier": "premium",
+ }
+ }
+ },
+ "alert": {"customer_id": 123},
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ # Assume this template represents the enrichment logic
+ template = "Customer details: Name: {{ steps.get-more-details.results.name }}, Email: {{ steps.get-more-details.results.email }}, Tier: {{ steps.get-more-details.results.tier }}"
+ rendered = iohandler.render(template)
+
+ expected_output = (
+ "Customer details: Name: John Doe, Email: john@example.com, Tier: premium"
+ )
+ assert rendered == expected_output
+
+
+def test_sentry_alerts_conditions(mocked_context_manager):
+ # Mock alert data to simulate a sentry alert for the payments service
+ mocked_context_manager.get_full_context.return_value = {
+ "alert": {
+ "service": "payments",
+ "name": "Error Alert",
+ "description": "Critical error occurred.",
+ }
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ condition_payments = "'{{ alert.service }}' == 'payments'"
+ condition_ftp = "'{{ alert.service }}' == 'ftp'"
+
+ # Simulate condition evaluations
+ assert eval(iohandler.render(condition_payments)) is True
+ assert eval(iohandler.render(condition_ftp)) is False
+
+
+def test_db_disk_space_alert(mocked_context_manager):
+ # Mock datadog logs data as would be returned by the `check-error-rate` step
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {"check-error-rate": {"results": {"logs": ["Error 1", "Error 2"]}}}
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ template = "Number of logs: keep.len({{ steps.check-error-rate.results.logs }})"
+ rendered = iohandler.render(template)
+
+ assert rendered == "Number of logs: 2"
+
+
+def test_query_bigquery_for_customer_tier(mocked_context_manager):
+ # Mock customer tier data as would be returned by the `get-customer-tier-by-id` step
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {
+ "get-customer-tier-by-id": {
+ "result": {"customer_name": "Acme Corp", "tier": "enterprise"}
+ }
+ },
+ "alert": {"customer_id": "123"},
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ # Check if the enterprise-tier condition correctly asserts
+ condition = "'{{ steps.get-customer-tier-by-id.result.tier }}' == 'enterprise'"
+ assert eval(iohandler.render(condition)) is True
+
+
+def test_opsgenie_get_open_alerts(mocked_context_manager):
+ # Mock open alerts data as would be returned by the `get-open-alerts` step
+ mocked_context_manager.get_full_context.return_value = {
+ "steps": {
+ "get-open-alerts": {
+ "results": {
+ "number_of_alerts": 2,
+ "alerts": [
+ {
+ "id": "1",
+ "priority": "high",
+ "created_at": "2024-03-20T12:00:00Z",
+ "message": "Critical issue",
+ },
+ {
+ "id": "2",
+ "priority": "medium",
+ "created_at": "2024-03-20T13:00:00Z",
+ "message": "Minor issue",
+ },
+ ],
+ }
+ }
+ }
+ }
+
+ iohandler = IOHandler(mocked_context_manager)
+ template = (
+ "Opsgenie has {{ steps.get-open-alerts.results.number_of_alerts }} open alerts"
+ )
+ rendered = iohandler.render(template)
+
+ assert "Opsgenie has 2 open alerts" in rendered
+
+
+def test_malformed_template_with_unmatched_braces(context_manager):
+ iohandler = IOHandler(context_manager)
+ malformed_template = "This template has an unmatched {{ brace."
+
+ with pytest.raises(Exception) as excinfo:
+ iohandler.render(malformed_template)
+
+ # Adjusted the assertion to match the actual error message
+ assert "number of } and { does not match" in str(excinfo.value)
+
+
+"""
+this is actually a bug but minor priority for now
+
+def test_malformed_template_with_incorrect_function_syntax(context_manager):
+ iohandler = IOHandler(context_manager)
+ wrong_function_use = "Incorrect function call keep.lenֿ[wrong_syntax]"
+
+ rendered = iohandler.render(wrong_function_use)
+
+ assert wrong_function_use == rendered
+"""
+
+
+def test_unrecognized_function_call(context_manager):
+ iohandler = IOHandler(context_manager)
+ template_with_unrecognized_function = (
+ "Calling an unrecognized function keep.nonexistent_function()"
+ )
+
+ with pytest.raises(Exception) as excinfo:
+ iohandler.render(template_with_unrecognized_function)
+
+ assert "module 'keep.functions' has no attribute" in str(
+ excinfo.value
+ ) # This assertion depends on the specific error handling and messaging in your application
+
+
+def test_missing_closing_parenthesis(context_manager):
+ iohandler = IOHandler(context_manager)
+ malformed_template = "keep.len({{ steps.some_list }"
+ extracted_functions = iohandler.extract_keep_functions(malformed_template)
+ assert (
+ len(extracted_functions) == 0
+ ), "Expected no functions to be extracted due to missing closing parenthesis."
+
+
+def test_nested_malformed_function_calls(context_manager):
+ iohandler = IOHandler(context_manager)
+ malformed_template = (
+ "keep.first(keep.len({{ steps.some_list }, keep.lowercase('TEXT')"
+ )
+ extracted_functions = iohandler.extract_keep_functions(malformed_template)
+ assert (
+ len(extracted_functions) == 0
+ ), "Expected no functions to be extracted due to malformed nested calls."
+
+
+def test_extra_closing_parenthesis(context_manager):
+ iohandler = IOHandler(context_manager)
+ malformed_template = "keep.len({{ steps.some_list }}))"
+ extracted_functions = iohandler.extract_keep_functions(malformed_template)
+ # Assuming the method can ignore the extra closing parenthesis and still extract the function correctly
+ assert (
+ len(extracted_functions) == 1
+ ), "Expected one function to be extracted despite an extra closing parenthesis."
+
+
+def test_incorrect_function_name(context_manager):
+ iohandler = IOHandler(context_manager)
+ malformed_template = "keep.lenght({{ steps.some_list }})"
+ extracted_functions = iohandler.extract_keep_functions(malformed_template)
+ # Assuming the method extracts the function call regardless of the function name being valid
+ assert (
+ len(extracted_functions) == 1
+ ), "Expected one function to be extracted despite the incorrect function name."
+
+
+def test_keep_in_string_not_as_function_call(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Here is a sentence with keep. not as a function call: 'Let's keep. moving forward.'"
+ extracted_functions = iohandler.extract_keep_functions(template)
+ assert (
+ len(extracted_functions) == 0
+ ), "Expected no functions to be extracted when 'keep.' is part of a string."
+
+
+def test_no_function_calls(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "This is a sentence with keep. but no function calls."
+    # extract_keep_functions is a method on the IOHandler instance
+ functions = iohandler.extract_keep_functions(template)
+ assert len(functions) == 0, "Should find no functions"
+
+
+def test_keep_in_quoted_string_not_as_function_call(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Here is a sentence with keep. not as a function call: 'Let's keep. moving forward.'"
+ functions = iohandler.extract_keep_functions(template)
+ assert (
+ len(functions) == 0
+ ), "Should find no functions when 'keep.' is part of a string."
+
+
+def test_malformed_function_calls(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Here is a malformed function call keep.(without closing parenthesis."
+ functions = iohandler.extract_keep_functions(template)
+ assert len(functions) == 0, "Should handle malformed function calls gracefully."
+
+
+def test_mixed_content(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Mix of valid keep.doSomething() and text keep. not as a call."
+ functions = iohandler.extract_keep_functions(template)
+ assert len(functions) == 1, "Should only extract valid function calls."
+
+
+def test_nested_functions(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Nested functions keep.nest(keep.inner()) should be handled."
+ functions = iohandler.extract_keep_functions(template)
+ assert len(functions) == 1, "Should handle nested functions without getting stuck."
+
+
+def test_endless_loop_potential(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "keep.() empty function call followed by text keep. not as a call."
+ functions = iohandler.extract_keep_functions(template)
+ assert (
+ len(functions) == 1
+ ), "Should not enter an endless loop with empty function calls."
+
+
+def test_edge_case_with_escaped_quotes(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = (
+ r"Edge case keep.function('argument with an escaped quote\\') and more text."
+ )
+ functions = iohandler.extract_keep_functions(template)
+ assert (
+ len(functions) == 1
+ ), "Should correctly handle escaped quotes within function arguments."
+
+
+def test_consecutive_function_calls(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Consecutive keep.first() and keep.second() calls."
+ functions = iohandler.extract_keep_functions(template)
+ assert len(functions) == 2, "Should correctly handle consecutive function calls."
+
+
+def test_function_call_at_end(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Function call at the very end keep.end()"
+ functions = iohandler.extract_keep_functions(template)
+ assert (
+ len(functions) == 1
+ ), "Should correctly handle a function call at the end of the string."
+
+
+def test_complex_mixture(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "Mix keep.start() some text keep.in('middle') and malformed keep. and valid keep.end()."
+ functions = iohandler.extract_keep_functions(template)
+ assert (
+ len(functions) == 3
+ ), "Should correctly handle a complex mixture of text and function calls."
+
+
+"""
+def test_escaped_quotes_inside_function_arguments(context_manager):
+ iohandler = IOHandler(context_manager)
+ template = "keep.split('some,string,with,escaped\\\\'quotes', ',')"
+ extracted_functions = iohandler.extract_keep_functions(template)
+ # Assuming the method can handle escaped quotes within function arguments
+ assert (
+ len(extracted_functions) == 1
+ ), "Expected one function to be extracted with escaped quotes inside arguments."
+"""