From b3fa1bbb22509250c91ce339e70e907ade1a678f Mon Sep 17 00:00:00 2001
From: Ezhil Shanmugham
Date: Tue, 10 Sep 2024 18:20:13 +0530
Subject: [PATCH 01/10] feat: clickhouse provider (#1889)
Co-authored-by: Shahar Glazner
---
README.md | 2 +
docs/mint.json | 1 +
.../documentation/clickhouse-provider.mdx | 29 ++++
examples/workflows/query_clickhouse.yml | 31 ++++
keep-ui/public/icons/clickhouse-icon.png | Bin 0 -> 5755 bytes
keep/providers/clickhouse_provider/README.md | 84 +++++++++
.../providers/clickhouse_provider/__init__.py | 0
.../clickhouse_provider.py | 164 ++++++++++++++++++
poetry.lock | 144 ++++++++++++++-
pyproject.toml | 1 +
10 files changed, 454 insertions(+), 2 deletions(-)
create mode 100644 docs/providers/documentation/clickhouse-provider.mdx
create mode 100644 examples/workflows/query_clickhouse.yml
create mode 100644 keep-ui/public/icons/clickhouse-icon.png
create mode 100644 keep/providers/clickhouse_provider/README.md
create mode 100644 keep/providers/clickhouse_provider/__init__.py
create mode 100644 keep/providers/clickhouse_provider/clickhouse_provider.py
diff --git a/README.md b/README.md
index 439d5aa4f..04db95c11 100644
--- a/README.md
+++ b/README.md
@@ -147,6 +147,8 @@ Workflow triggers can either be executed manually when an alert is activated or
+
+
Communication platforms
diff --git a/docs/mint.json b/docs/mint.json
index 9ae4e6fe4..99e9a8b8a 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -102,6 +102,7 @@
"providers/documentation/axiom-provider",
"providers/documentation/azuremonitoring-provider",
"providers/documentation/centreon-provider",
+ "providers/documentation/clickhouse-provider",
"providers/documentation/cloudwatch-provider",
"providers/documentation/console-provider",
"providers/documentation/coralogix-provider",
diff --git a/docs/providers/documentation/clickhouse-provider.mdx b/docs/providers/documentation/clickhouse-provider.mdx
new file mode 100644
index 000000000..5afd1d7c4
--- /dev/null
+++ b/docs/providers/documentation/clickhouse-provider.mdx
@@ -0,0 +1,29 @@
+---
+title: 'ClickHouse'
+sidebarTitle: 'ClickHouse Provider'
+description: 'The ClickHouse provider allows you to interact with a ClickHouse database.'
+---
+
+## Overview
+
+ClickHouse is an open-source column-oriented DBMS for online analytical processing that allows users to generate analytical reports using SQL queries in real time.
+
+## Authentication Parameters
+
+The ClickHouse provider requires the following authentication parameters:
+
+- `Clickhouse Username`: The username to authenticate with ClickHouse.
+- `Clickhouse Password`: The password to authenticate with ClickHouse.
+- `Clickhouse Hostname`: The host where ClickHouse is running.
+- `Clickhouse Port`: The port where ClickHouse is running. The default port is `9000`.
+- `Clickhouse Database`: The database to connect to.
+
+## Connecting with the ClickHouse provider
+
+1. Obtain the required authentication parameters.
+2. Add the ClickHouse provider to your Keep account and configure it with the authentication parameters above.
+
+## Useful Links
+
+- [ClickHouse](https://clickhouse.com/)
+- [ClickHouse Statements](https://clickhouse.com/docs/en/sql-reference/statements/)
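
To sanity-check these parameters outside of Keep, you can probe the connection directly with the `clickhouse-driver` package the provider is built on (added to `pyproject.toml` below). This is a minimal sketch, not part of the provider: the `CLICKHOUSE_*` variable names follow the provider's own `__main__` example, and `CLICKHOUSE_PORT` is an extra assumption here.

```python
# Minimal connectivity probe using clickhouse-driver's native client.
import os

from clickhouse_driver import Client

client = Client(
    host=os.environ.get("CLICKHOUSE_HOST", "localhost"),
    port=int(os.environ.get("CLICKHOUSE_PORT", "9000")),  # native TCP port
    user=os.environ.get("CLICKHOUSE_USER", "default"),
    password=os.environ.get("CLICKHOUSE_PASSWORD", ""),
    database=os.environ.get("CLICKHOUSE_DATABASE", "default"),
)
print(client.execute("SELECT version()"))
client.disconnect()
```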
diff --git a/examples/workflows/query_clickhouse.yml b/examples/workflows/query_clickhouse.yml
new file mode 100644
index 000000000..50ed16054
--- /dev/null
+++ b/examples/workflows/query_clickhouse.yml
@@ -0,0 +1,31 @@
+id: query-clickhouse
+description: Query Clickhouse and send an alert if there is an error
+triggers:
+ - type: manual
+
+steps:
+ - name: clickhouse-step
+ provider:
+ config: "{{ providers.clickhouse }}"
+ type: clickhouse
+ with:
+ query: SELECT * FROM logs_table ORDER BY timestamp DESC LIMIT 1;
+ single_row: "True"
+
+actions:
+ - name: ntfy-action
+ if: "'{{ steps.clickhouse-step.results.level }}' == 'ERROR'"
+ provider:
+ config: "{{ providers.ntfy }}"
+ type: ntfy
+ with:
+ message: "Error in clickhouse logs_table: {{ steps.clickhouse-step.results.level }}"
+ topic: clickhouse
+
+ - name: slack-action
+ if: "'{{ steps.clickhouse-step.results.level }}' == 'ERROR'"
+ provider:
+ config: "{{ providers.slack }}"
+ type: slack
+ with:
+ message: "Error in clickhouse logs_table: {{ steps.clickhouse-step.results.level }}"
diff --git a/keep-ui/public/icons/clickhouse-icon.png b/keep-ui/public/icons/clickhouse-icon.png
new file mode 100644
index 0000000000000000000000000000000000000000..b944f5838eceb1c54ffd18914b1f9a83f455127d
GIT binary patch
literal 5755 bytes (base85 data omitted)

diff --git a/keep/providers/clickhouse_provider/clickhouse_provider.py b/keep/providers/clickhouse_provider/clickhouse_provider.py
new file mode 100644
--- /dev/null
+++ b/keep/providers/clickhouse_provider/clickhouse_provider.py
@@ -0,0 +1,164 @@
[...]
+ def query(self, query="", single_row=False, **kwargs: dict) -> list | tuple:
+ """
+ Executes a query against the Clickhouse database.
+
+ Returns:
+ list | tuple: list of results or single result if single_row is True
+ """
+ return self._notify(query=query, single_row=single_row, **kwargs)
+
+ def _notify(
+ self, query="", single_row=False, **kwargs: dict
+ ) -> list | tuple:
+ """
+ Executes a query against the Clickhouse database.
+
+ Returns:
+ list | tuple: list of results or single result if single_row is True
+ """
+ client = self.__generate_client()
+ cursor = client.cursor(cursor_factory=DictCursor)
+
+ if kwargs:
+ query = query.format(**kwargs)
+
+ cursor.execute(query)
+ results = cursor.fetchall()
+
+ cursor.close()
+ if single_row:
+ return results[0]
+
+ return results
+
+
+if __name__ == "__main__":
+ config = ProviderConfig(
+ authentication={
+ "username": os.environ.get("CLICKHOUSE_USER"),
+ "password": os.environ.get("CLICKHOUSE_PASSWORD"),
+ "host": os.environ.get("CLICKHOUSE_HOST"),
+ "database": os.environ.get("CLICKHOUSE_DATABASE"),
+ }
+ )
+ context_manager = ContextManager(
+ tenant_id="singletenant",
+ workflow_id="test",
+ )
+ clickhouse_provider = ClickhouseProvider(context_manager, "clickhouse-prod", config)
+ results = clickhouse_provider.query(query="SELECT * FROM logs_table ORDER BY timestamp DESC LIMIT 1")
+ print(results)
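
Since `_notify` interpolates extra keyword arguments into the query with `str.format`, `{placeholder}` fields can parameterize a query. Continuing the `__main__` example above, a usage sketch (the table and column come from the example workflow, not from any fixed schema):

```python
# Placeholders are filled via plain str.format() before execution. This is
# client-side string substitution, not server-side parameter binding, so it
# should only be used with trusted values.
results = clickhouse_provider.query(
    query="SELECT message FROM logs_table WHERE level = '{level}' LIMIT 10",
    level="ERROR",  # replaces {level} in the query text
)
print(results)
```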
diff --git a/poetry.lock b/poetry.lock
index 420ee71a7..563eb33e4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -822,6 +822,128 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
+[[package]]
+name = "clickhouse-driver"
+version = "0.2.9"
+description = "Python driver with native interface for ClickHouse"
+optional = false
+python-versions = "<4,>=3.7"
+files = [
+ {file = "clickhouse-driver-0.2.9.tar.gz", hash = "sha256:050ea4870ead993910b39e7fae965dc1c347b2e8191dcd977cd4b385f9e19f87"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ce04e9d0d0f39561f312d1ac1a8147bc9206e4267e1a23e20e0423ebac95534"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ae5c8931bf290b9d85582e7955b9aad7f19ff9954e48caa4f9a180ea4d01078"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e51792f3bd12c32cb15a907f12de3c9d264843f0bb33dce400e3966c9f09a3f"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42fc546c31e4a04c97b749769335a679c9044dc693fa7a93e38c97fd6727173d"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a383a403d185185c64e49edd6a19b2ec973c5adcb8ebff7ed2fc539a2cc65a5"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f05321a97e816afc75b3e4f9eda989848fecf14ecf1a91d0f22c04258123d1f7"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47e793846aac28442b6b1c6554e0731b848a5a7759a54aa2489997354efe4a"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:780e42a215d1ae2f6d695d74dd6f087781fb2fa51c508b58f79e68c24c5364e0"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e28f1fe850675e173db586e9f1ac790e8f7edd507a4227cd54cd7445f8e75b6"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:125aae7f1308d3083dadbb3c78f828ae492e060f13e4007a0cf53a8169ed7b39"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2f3c4fbb61e75c62a1ab93a1070d362de4cb5682f82833b2c12deccb3bae888d"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dc03196a84e32d23b88b665be69afae98f57426f5fdf203e16715b756757961"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-win32.whl", hash = "sha256:25695d78a1d7ad6e221e800612eac08559f6182bf6dee0a220d08de7b612d993"},
+ {file = "clickhouse_driver-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:367acac95398d721a0a2a6cf87e93638c5588b79498a9848676ce7f182540a6c"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6af1c6cbc3481205503ab72a34aa76d6519249c904aa3f7a84b31e7b435555be"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f202a58a540c85e47c31dabc8f84b6fe79dca5315c866450a538d58d6fa0571"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4df50fd84bfa4aa1eb7b52d48136066bfb64fabb7ceb62d4c318b45a296200b"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433a650571a0d7766eb6f402e8f5930222997686c2ee01ded22f1d8fd46af9d4"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232ee260475611cbf7adb554b81db6b5790b36e634fe2164f4ffcd2ca3e63a71"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:09049f7e71f15c9c9a03f597f77fc1f7b61ababd155c06c0d9e64d1453d945d7"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:424153d1d5f5a807f596a48cc88119f9fb3213ca7e38f57b8d15dcc964dd91f7"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4f078fd1cf19c4ca63b8d1e0803df665310c8d5b644c5b02bf2465e8d6ef8f55"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f138d939e26e767537f891170b69a55a88038919f5c10d8865b67b8777fe4848"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9aafabc7e32942f85dcb46f007f447ab69024831575df97cae28c6ed127654d1"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-win32.whl", hash = "sha256:935e16ebf1a1998d8493979d858821a755503c9b8af572d9c450173d4b88868c"},
+ {file = "clickhouse_driver-0.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:306b3102cba278b5dfec6f5f7dc8b78416c403901510475c74913345b56c9e42"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a3e6b0a1eb218e3d870a94c76daaf65da46dca8f6888ea6542f94905c24d88"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85d50c011467f5ff6772c4059345968b854b72e07a0219030b7c3f68419eb7f7"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93b395c1370629ccce8fb3e14cd5be2646d227bd32018c21f753c543e9a7e96b"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fffa5a5f317b1ec92e406a30a008929054cf3164d2324a3c465d0a0330273bf8"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:476702740a279744badbd177ae1c4a2d089ec128bd676861219d1f92078e4530"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5cd6d95fab5ff80e9dc9baedc9a926f62f74072d42d5804388d63b63bec0bb63"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:05027d32d7cf3e46cb8d04f8c984745ae01bd1bc7b3579f9dadf9b3cca735697"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3d11831842250b4c1b26503a6e9c511fc03db096608b7c6af743818c421a3032"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81b4b671b785ebb0b8aeabf2432e47072413d81db959eb8cfd8b6ab58c5799c6"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-win32.whl", hash = "sha256:e893bd4e014877174a59e032b0e99809c95ec61328a0e6bd9352c74a2f6111a8"},
+ {file = "clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:909205324089a9ee59bee7ecbfa94595435118cca310fd62efdf13f225aa2965"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f31d6e47dc2b0f367f598f5629147ed056d7216c1788e25190fcfbfa02e749"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed84179914b2b7bb434c2322a6e7fd83daa681c97a050450511b66d917a129bb"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67d1bf63efb4ba14ae6c6da99622e4a549e68fc3ee14d859bf611d8e6a61b3fa"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eed23ea41dd582d76f7a2ec7e09cbe5e9fec008f11a4799fa35ce44a3ebd283"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a654291132766efa2703058317749d7c69b69f02d89bac75703eaf7f775e20da"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c26c5ef16d0ef3cabc5bc03e827e01b0a4afb5b4eaf8850b7cf740cee04a1d4"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b57e83d7986d3cbda6096974a9510eb53cb33ad9072288c87c820ba5eee3370e"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:153cc03b36f22cbde55aa6a5bbe99072a025567a54c48b262eb0da15d8cd7c83"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:83a857d99192936091f495826ae97497cd1873af213b1e069d56369fb182ab8e"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb05a9bb22cbe9ad187ad268f86adf7e60df6083331fe59c01571b7b725212dd"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-win32.whl", hash = "sha256:3e282c5c25e32d96ed151e5460d2bf4ecb805ea64449197dd918e84e768016df"},
+ {file = "clickhouse_driver-0.2.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c46dccfb04a9afd61a1b0e60bfefceff917f76da2c863f9b36b39248496d5c77"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:612ca9028c718f362c97f552e63d313cf1a70a616ef8532ddb0effdaf12ebef9"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471b884d318e012f68d858476052742048918854f7dfe87d78e819f87a848ffb"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ee63c35e99da887eb035c8d6d9e64fd298a0efc1460395297dd5cc281a6912"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0819bb63d2c5025a1fb9589f57ef82602687cef11081d6dfa6f2ce44606a1772"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6680ee18870bca1fbab1736c8203a965efaec119ab4c37821ad99add248ee08"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:713c498741b54debd3a10a5529e70b6ed85ca33c3e8629e24ae5cd8160b5a5f2"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:730837b8f63941065c9c955c44286aef0987fb084ffb3f55bf1e4fe07df62269"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9f4e38b2ea09214c8e7848a19391009a18c56a3640e1ba1a606b9e57aeb63404"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:457f1d6639e0345b717ae603c79bd087a35361ce68c1c308d154b80b841e5e7d"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:49a55aeb8ea625a87965a96e361bbb1ad67d0931bfb2a575f899c1064e70c2da"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9230058d8c9b1a04079afae4650fb67745f0f1c39db335728f64d48bd2c19246"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8798258bd556542dd9c6b8ebe62f9c5110c9dcdf97c57fb077e7b8b6d6da0826"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-win32.whl", hash = "sha256:ce8e3f4be46bcc63555863f70ab0035202b082b37e6f16876ef50e7bc4b47056"},
+ {file = "clickhouse_driver-0.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:2d982959ff628255808d895a67493f2dab0c3a9bfc65eeda0f00c8ae9962a1b3"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a46b227fab4420566ed24ee70d90076226d16fcf09c6ad4d428717efcf536446"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eaa2ce5ea08cf5fddebb8c274c450e102f329f9e6966b6cd85aa671c48e5552"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97f0083194d6e23b5ef6156ed0d5388c37847b298118199d7937ba26412a9e2"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6cab5cdbb0f8ee51d879d977b78f07068b585225ac656f3c081896c362e8f83"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdb1b011a53ee71539e9dc655f268b111bac484db300da92829ed59e910a8fd0"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf51bb761b281d20910b4b689c699ef98027845467daa5bb5dfdb53bd6ee404"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8ea462e3cebb121ff55002e9c8a9a0a3fd9b5bbbf688b4960f0a83c0172fb31"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:70bee21c245226ad0d637bf470472e2d487b86911b6d673a862127b934336ff4"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:253a3c223b944d691bf0abbd599f592ea3b36f0a71d2526833b1718f37eca5c2"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a6549b53fc5c403dc556cb39b2ae94d73f9b113daa00438a660bb1dd5380ae4d"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1c685cd4abe61af1c26279ff04b9f567eb4d6c1ec7fb265af7481b1f153043aa"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e25144219577491929d032a6c3ddd63c6cd7fa764af829a5637f798190d9b26"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-win32.whl", hash = "sha256:0b9925610d25405a8e6d83ff4f54fc2456a121adb0155999972f5edd6ba3efc8"},
+ {file = "clickhouse_driver-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:b243de483cfa02716053b0148d73558f4694f3c27b97fc1eaa97d7079563a14d"},
+ {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45a3d5b1d06750fd6a18c29b871494a2635670099ec7693e756a5885a4a70dbf"},
+ {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8415ffebd6ca9eef3024763abc450f8659f1716d015bd563c537d01c7fbc3569"},
+ {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace48db993aa4bd31c42de0fa8d38c94ad47405916d6b61f7a7168a48fb52ac1"},
+ {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b07123334fe143bfe6fa4e3d4b732d647d5fd2cfb9ec7f2f76104b46fe9d20c6"},
+ {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2af3efa73d296420ce6362789f5b1febf75d4aa159a479393f01549115509d5"},
+ {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baf57eede88d07a1eb04352d26fc58a4d97991ca3d8840f7c5d48691dec9f251"},
+ {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:275d0ccdab9c3571bdb3e9acfab4497930aa584ff2766b035bb2f854deaf8b82"},
+ {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:293da77bfcac3168fb35b27c242f97c1a05502435c0686ecbb8e2e4abcb3de26"},
+ {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6c2e5830705e4eeef33070ca4d5a24dfa221f28f2f540e5e6842c26e70b10b"},
+ {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:11934bd78d97dd7e1a23a6222b5edd1e1b4d34e1ead5c846dc2b5c56fdc35ff5"},
+ {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b802b6f0fbdcc3ab81b87f09b694dde91ab049f44d1d2c08c3dc8ea9a5950cfa"},
+ {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af871c5315eb829ecf4533c790461ea8f73b3bfd5f533b0467e479fdf6ddcfd"},
+ {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d577dd4867b9e26cf60590e1f500990c8701a6e3cfbb9e644f4d0c0fb607028"},
+ {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ed3dea2d1eca85fef5b8564ddd76dedb15a610c77d55d555b49d9f7c896b64b"},
+ {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:91ec96f2c48e5bdeac9eea43a9bc9cc19acb2d2c59df0a13d5520dfc32457605"},
+ {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7667ab423452754f36ba8fb41e006a46baace9c94e2aca2a745689b9f2753dfb"},
+ {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:653583b1f3b088d106f180d6f02c90917ecd669ec956b62903a05df4a7f44863"},
+ {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef3dd0cbdf2f0171caab90389af0ede068ec802bf46c6a77f14e6edc86671bc"},
+ {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11b1833ee8ff8d5df39a34a895e060b57bd81e05ea68822bc60476daff4ce1c8"},
+ {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a3195639e6393b9d4aafe736036881ff86b6be5855d4bf7d9f5c31637181ec3"},
+]
+
+[package.dependencies]
+pytz = "*"
+tzlocal = "*"
+
+[package.extras]
+lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"]
+numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"]
+zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"]
+
[[package]]
name = "cloud-sql-python-connector"
version = "1.12.0"
@@ -2463,7 +2585,7 @@ name = "ndg-httpsclient"
version = "0.5.1"
description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL"
optional = false
-python-versions = ">=2.7,<3.0.0 || >=3.4.0"
+python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0"
files = [
{file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"},
{file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"},
@@ -3956,6 +4078,7 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -4800,6 +4923,23 @@ files = [
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
]
+[[package]]
+name = "tzlocal"
+version = "5.2"
+description = "tzinfo object for the local timezone"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
+ {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
+]
+
+[package.dependencies]
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
+
[[package]]
name = "uptime-kuma-api"
version = "1.2.1"
@@ -5158,4 +5298,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = ">=3.11,<3.12"
-content-hash = "c310f41b772e1e04d73e55bc1c3d5c39c46e49fbf26739e6650705ceea33cdc2"
+content-hash = "a94488bcf0d635773f1dc9d235a623e6818ed8f7681f4128a7e214c4304e005a"
diff --git a/pyproject.toml b/pyproject.toml
index 4ad060fca..a1476b0c9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -93,6 +93,7 @@ quickchart-io = "^2.0.0"
scipy = "^1.14.1"
networkx = "^3.3"
google-auth = "2.34.0"
+clickhouse-driver = "^0.2.9"
[tool.poetry.group.dev.dependencies]
pre-commit = "^3.0.4"
pre-commit-hooks = "^4.4.0"
From f9cd4cffa0c161f6fb101baf6e6c57e35cb7407e Mon Sep 17 00:00:00 2001
From: Shahar Glazner
Date: Wed, 11 Sep 2024 09:26:06 +0300
Subject: [PATCH 02/10] chore: 0.24.1 (#1894)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index a1476b0c9..5d20a639c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "keep"
-version = "0.24.0"
+version = "0.24.1"
description = "Alerting. for developers, by developers."
authors = ["Keep Alerting LTD"]
readme = "README.md"
From b23778bdc5f7d81e0413a29f9fba2c02281643b9 Mon Sep 17 00:00:00 2001
From: Nejc Ravnik
Date: Wed, 11 Sep 2024 09:15:28 +0200
Subject: [PATCH 03/10] feat: add the option to disable workflow (#1766)
Co-authored-by: Tal
Co-authored-by: Matvey Kukuy
---
docs/workflows/syntax/basic-syntax.mdx | 1 +
.../workflows/builder/builder-validators.tsx | 2 +-
keep-ui/app/workflows/builder/builder.tsx | 2 +-
keep-ui/app/workflows/builder/editors.tsx | 185 +++++++++++-------
keep-ui/app/workflows/builder/utils.tsx | 5 +
keep-ui/app/workflows/models.tsx | 1 +
keep-ui/app/workflows/workflow-menu.tsx | 9 +-
keep-ui/app/workflows/workflow-tile.tsx | 7 +
keep/api/core/db.py | 10 +-
.../versions/2024-08-30-09-34_7ed12220a0d3.py | 82 ++++++++
.../versions/2024-09-04-09-38_b30d2141e1cb.py | 21 ++
.../versions/2024-09-10-17-59_710b4ff1d19e.py | 21 ++
keep/api/models/db/workflow.py | 1 +
keep/api/models/workflow.py | 4 +-
keep/api/routes/workflows.py | 4 +-
keep/parser/parser.py | 7 +
keep/workflowmanager/workflow.py | 5 +
keep/workflowmanager/workflowmanager.py | 6 +
keep/workflowmanager/workflowstore.py | 2 +
tests/test_workflow_execution.py | 100 ++++++++++
20 files changed, 389 insertions(+), 86 deletions(-)
create mode 100644 keep/api/models/db/migrations/versions/2024-08-30-09-34_7ed12220a0d3.py
create mode 100644 keep/api/models/db/migrations/versions/2024-09-04-09-38_b30d2141e1cb.py
create mode 100644 keep/api/models/db/migrations/versions/2024-09-10-17-59_710b4ff1d19e.py
diff --git a/docs/workflows/syntax/basic-syntax.mdx b/docs/workflows/syntax/basic-syntax.mdx
index 9d2c7be95..143dff685 100644
--- a/docs/workflows/syntax/basic-syntax.mdx
+++ b/docs/workflows/syntax/basic-syntax.mdx
@@ -41,6 +41,7 @@ workflow:
workflow:
id: raw-sql-query
description: Monitor that time difference is no more than 1 hour
+ disabled: Optionally prevent this workflow from running
steps:
-
actions:
diff --git a/keep-ui/app/workflows/builder/builder-validators.tsx b/keep-ui/app/workflows/builder/builder-validators.tsx
index fe03e776f..b9742a2ed 100644
--- a/keep-ui/app/workflows/builder/builder-validators.tsx
+++ b/keep-ui/app/workflows/builder/builder-validators.tsx
@@ -18,7 +18,7 @@ export function globalValidatorV2(
if (
!!definition?.properties &&
- !definition.properties['manual'] &&
+ !definition.properties['manual'] &&
!definition.properties['interval'] &&
!definition.properties['alert']
) {
diff --git a/keep-ui/app/workflows/builder/builder.tsx b/keep-ui/app/workflows/builder/builder.tsx
index b2c7565e6..3b33d8470 100644
--- a/keep-ui/app/workflows/builder/builder.tsx
+++ b/keep-ui/app/workflows/builder/builder.tsx
@@ -186,7 +186,7 @@ function Builder({
triggers = { alert: { source: alertSource, name: alertName } };
}
setDefinition(
- wrapDefinitionV2({...generateWorkflow(alertUuid, "", "", [], [], triggers), isValid: true})
+ wrapDefinitionV2({...generateWorkflow(alertUuid, "", "", false,[], [], triggers), isValid: true})
);
} else {
setDefinition(wrapDefinitionV2({...parseWorkflow(loadedAlertFile!, providers), isValid:true}));
diff --git a/keep-ui/app/workflows/builder/editors.tsx b/keep-ui/app/workflows/builder/editors.tsx
index c7a44771f..8ebcc7d76 100644
--- a/keep-ui/app/workflows/builder/editors.tsx
+++ b/keep-ui/app/workflows/builder/editors.tsx
@@ -292,81 +292,118 @@ function WorkflowEditorV2({
const isTrigger = ["manual", "alert", 'interval'].includes(key) ;
renderDivider = isTrigger && key === selectedNode ? !renderDivider : false;
return (
-
- { renderDivider &&
}
- {((key === selectedNode)||(!isTrigger)) &&
{key}}
- {key === "manual" ? (
- selectedNode === 'manual' &&
-
- setProperties({
- ...properties,
- [key]: e.target.checked ? "true" : "false",
- })
- }
- disabled={true}
- />
+
+ {renderDivider &&
}
+ {((key === selectedNode) || (!isTrigger)) &&
{key}}
+
+ {(() => {
+ switch (key) {
+ case "manual":
+ return (
+ selectedNode === "manual" && (
+
+
+ setProperties({
+ ...properties,
+ [key]: e.target.checked ? "true" : "false",
+ })
+ }
+ disabled={true}
+ />
+
+ )
+ );
+
+ case "alert":
+ return (
+ selectedNode === "alert" && (
+ <>
+
+
+
+ {properties.alert &&
+ Object.keys(properties.alert as {}).map((filter) => {
+ return (
+ <>
+
{filter}
+
+
+ updateAlertFilter(filter, e.target.value)
+ }
+ value={(properties.alert as any)[filter] as string}
+ />
+ deleteFilter(filter)}
+ />
+
+ >
+ );
+ })}
+ >
+ )
+ );
+
+ case "interval":
+ return (
+ selectedNode === "interval" && (
+
+ setProperties({ ...properties, [key]: e.target.value })
+ }
+ value={properties[key] as string}
+ />
+ )
+ );
+ case "disabled":
+ return (
+
+
+ setProperties({
+ ...properties,
+ [key]: e.target.checked ? "true" : "false",
+ })
+ }
+ />
+
+ );
+ default:
+ return (
+
+ setProperties({ ...properties, [key]: e.target.value })
+ }
+ value={properties[key] as string}
+ />
+ );
+ }
+ })()}
- ) : key === "alert" ? (
- selectedNode === 'alert' && <>
-
-
-
- {properties.alert &&
- Object.keys(properties.alert as {}).map((filter) => {
- return (
- <>
-
{filter}
-
-
- updateAlertFilter(filter, e.target.value)
- }
- value={(properties.alert as any)[filter] as string}
- />
- deleteFilter(filter)}
- />
-
- >
- );
- })}
- >
- ) : key === "interval" ? (
- selectedNode === 'interval' &&
- setProperties({ ...properties, [key]: e.target.value })
- }
- value={properties[key] as string}
- />
- ):
- setProperties({ ...properties, [key]: e.target.value })
- }
- value={properties[key] as string}
- />}
-
-
- );
+ );
+
})}
>
);
diff --git a/keep-ui/app/workflows/builder/utils.tsx b/keep-ui/app/workflows/builder/utils.tsx
index d89ef574e..4ac183c58 100644
--- a/keep-ui/app/workflows/builder/utils.tsx
+++ b/keep-ui/app/workflows/builder/utils.tsx
@@ -211,6 +211,7 @@ export function generateWorkflow(
workflowId: string,
name: string,
description: string,
+ disabled: boolean,
steps: V2Step[],
conditions: V2Step[],
triggers: { [key: string]: { [key: string]: string } } = {}
@@ -225,6 +226,7 @@ export function generateWorkflow(
id: workflowId,
name: name,
description: description,
+ disabled:disabled,
isLocked: true,
...triggers,
},
@@ -305,6 +307,7 @@ export function parseWorkflow(
workflow.id,
workflow.name,
workflow.description,
+ workflow.disabled,
steps,
conditions,
triggers
@@ -384,6 +387,7 @@ export function buildAlert(definition: Definition): Alert {
const alertId = alert.properties.id as string;
const name = (alert.properties.name as string) ?? "";
const description = (alert.properties.description as string) ?? "";
+ const disabled = (alert.properties.disabled) ?? false
const owners = (alert.properties.owners as string[]) ?? [];
const services = (alert.properties.services as string[]) ?? [];
// Steps (move to func?)
@@ -510,6 +514,7 @@ export function buildAlert(definition: Definition): Alert {
name: name,
triggers: triggers,
description: description,
+ disabled : Boolean(disabled),
owners: owners,
services: services,
steps: steps,
diff --git a/keep-ui/app/workflows/models.tsx b/keep-ui/app/workflows/models.tsx
index 78110f86b..7b548a518 100644
--- a/keep-ui/app/workflows/models.tsx
+++ b/keep-ui/app/workflows/models.tsx
@@ -33,6 +33,7 @@ export type Workflow = {
interval: string;
providers: Provider[];
triggers: Trigger[];
+ disabled:boolean,
last_execution_time: string;
last_execution_status: string;
last_updated: string;
diff --git a/keep-ui/app/workflows/workflow-menu.tsx b/keep-ui/app/workflows/workflow-menu.tsx
index b15651a81..732ff3d68 100644
--- a/keep-ui/app/workflows/workflow-menu.tsx
+++ b/keep-ui/app/workflows/workflow-menu.tsx
@@ -3,7 +3,7 @@ import { Fragment } from "react";
import { EllipsisHorizontalIcon } from "@heroicons/react/20/solid";
import { Icon } from "@tremor/react";
import { EyeIcon, PencilIcon, PlayIcon, TrashIcon, WrenchIcon } from "@heroicons/react/24/outline";
-import { DownloadIcon } from "@radix-ui/react-icons";
+import {DownloadIcon, LockClosedIcon, LockOpen1Icon} from "@radix-ui/react-icons";
interface WorkflowMenuProps {
onDelete?: () => Promise<void>;
@@ -14,6 +14,7 @@ interface WorkflowMenuProps {
allProvidersInstalled: boolean;
hasManualTrigger: boolean;
hasAlertTrigger: boolean;
+ isWorkflowDisabled:boolean
}
@@ -25,18 +26,20 @@ export default function WorkflowMenu({
onBuilder,
allProvidersInstalled,
hasManualTrigger,
- hasAlertTrigger
+ hasAlertTrigger,
+ isWorkflowDisabled,
}: WorkflowMenuProps) {
const getDisabledTooltip = () => {
if (!allProvidersInstalled) return "Not all providers are installed.";
if (!hasManualTrigger) return "No manual trigger available.";
+ if (isWorkflowDisabled) return "Workflow is disabled";
return "";
};
const stopPropagation = (e: React.MouseEvent) => {
e.stopPropagation();
};
- const isRunButtonDisabled = !allProvidersInstalled || (!hasManualTrigger && !hasAlertTrigger);
+ const isRunButtonDisabled = !allProvidersInstalled || (!hasManualTrigger && !hasAlertTrigger) || isWorkflowDisabled;
return (
diff --git a/keep-ui/app/workflows/workflow-tile.tsx b/keep-ui/app/workflows/workflow-tile.tsx
index 383daf703..28307ff5d 100644
--- a/keep-ui/app/workflows/workflow-tile.tsx
+++ b/keep-ui/app/workflows/workflow-tile.tsx
@@ -78,6 +78,7 @@ function WorkflowMenuSection({
onView={onView}
onBuilder={onBuilder}
allProvidersInstalled={allProvidersInstalled}
+ isWorkflowDisabled={workflow.disabled}
hasManualTrigger={hasManualTrigger}
hasAlertTrigger={hasAlertTrigger}
/>
@@ -1080,6 +1081,12 @@ export function WorkflowTileOld({ workflow }: { workflow: Workflow }) {
: "N/A"}
+
+ Disabled
+
+ {workflow?.disabled?.toString()}
+
+
diff --git a/keep/api/core/db.py b/keep/api/core/db.py
index c31353dfd..9d7d5c40b 100644
--- a/keep/api/core/db.py
+++ b/keep/api/core/db.py
@@ -156,6 +156,7 @@ def get_workflows_that_should_run():
workflows_with_interval = (
session.query(Workflow)
.filter(Workflow.is_deleted == False)
+ .filter(Workflow.is_disabled == False)
.filter(Workflow.interval != None)
.filter(Workflow.interval > 0)
.all()
@@ -276,6 +277,7 @@ def add_or_update_workflow(
created_by,
interval,
workflow_raw,
+ is_disabled,
updated_by=None,
) -> Workflow:
with Session(engine, expire_on_commit=False) as session:
@@ -299,6 +301,7 @@ def add_or_update_workflow(
existing_workflow.revision += 1 # Increment the revision
existing_workflow.last_updated = datetime.now() # Update last_updated
existing_workflow.is_deleted = False
+ existing_workflow.is_disabled= is_disabled
else:
# Create a new workflow
@@ -310,6 +313,7 @@ def add_or_update_workflow(
created_by=created_by,
updated_by=updated_by, # Set updated_by to the provided value
interval=interval,
+ is_disabled =is_disabled,
workflow_raw=workflow_raw,
)
session.add(workflow)
@@ -495,7 +499,6 @@ def get_raw_workflow(tenant_id: str, workflow_id: str) -> str:
return None
return workflow.workflow_raw
-
def update_provider_last_pull_time(tenant_id: str, provider_id: str):
extra = {"tenant_id": tenant_id, "provider_id": provider_id}
logger.info("Updating provider last pull time", extra=extra)
@@ -1333,7 +1336,7 @@ def save_workflow_results(tenant_id, workflow_execution_id, workflow_results):
session.commit()
-def get_workflow_id_by_name(tenant_id, workflow_name):
+def get_workflow_by_name(tenant_id, workflow_name):
with Session(engine) as session:
workflow = session.exec(
select(Workflow)
@@ -1342,8 +1345,7 @@ def get_workflow_id_by_name(tenant_id, workflow_name):
.where(Workflow.is_deleted == False)
).first()
- if workflow:
- return workflow.id
+ return workflow
def get_previous_execution_id(tenant_id, workflow_id, workflow_execution_id):
diff --git a/keep/api/models/db/migrations/versions/2024-08-30-09-34_7ed12220a0d3.py b/keep/api/models/db/migrations/versions/2024-08-30-09-34_7ed12220a0d3.py
new file mode 100644
index 000000000..97c81789b
--- /dev/null
+++ b/keep/api/models/db/migrations/versions/2024-08-30-09-34_7ed12220a0d3.py
@@ -0,0 +1,82 @@
+"""Added is_disabled to workflows
+
+Revision ID: 7ed12220a0d3
+Revises: 1c650a429672
+Create Date: 2024-08-30 09:34:41.782797
+
+"""
+
+import sqlalchemy as sa
+import yaml
+from alembic import op
+
+from keep.parser.parser import Parser
+
+# revision identifiers, used by Alembic.
+revision = "7ed12220a0d3"
+down_revision = "1c650a429672"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ with op.batch_alter_table("workflow", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("is_disabled", sa.Boolean(), nullable=False, server_default=sa.false()))
+
+ connection = op.get_bind()
+ workflows = connection.execute(sa.text("SELECT id, workflow_raw FROM workflow")).fetchall()
+
+ updates = []
+ for workflow in workflows:
+ try:
+ workflow_yaml = yaml.safe_load(workflow.workflow_raw)
+ # If, by any chance, the existing workflow YAML's "disabled" value resolves to true,
+ # we need to update the database to set `is_disabled` to `True`
+ if Parser.parse_disabled(workflow_yaml):
+ updates.append({
+ 'id': workflow.id,
+ 'is_disabled': True
+ })
+ except Exception as e:
+ print(f"Failed to parse workflow_raw for workflow id {workflow.id}: {e}")
+ continue
+
+ if updates:
+ connection.execute(
+ sa.text(
+ "UPDATE workflow SET is_disabled = :is_disabled WHERE id = :id"
+ ),
+ updates
+ )
+
+
+
+def downgrade() -> None:
+ with op.batch_alter_table("workflow", schema=None) as batch_op:
+ batch_op.drop_column("is_disabled")
+
+ connection = op.get_bind()
+ workflows = connection.execute(sa.text("SELECT id, workflow_raw FROM workflow")).fetchall()
+
+ updates = []
+ for workflow in workflows:
+ try:
+ workflow_yaml = yaml.safe_load(workflow.workflow_raw)
+ if 'disabled' in workflow_yaml:
+ workflow_yaml.pop('disabled', None)
+ updated_workflow_raw = yaml.safe_dump(workflow_yaml)
+ updates.append({
+ 'id': workflow.id,
+ 'workflow_raw': updated_workflow_raw
+ })
+ except Exception as e:
+ print(f"Failed to parse workflow_raw for workflow id {workflow.id}: {e}")
+ continue
+
+ if updates:
+ connection.execute(
+ sa.text(
+ "UPDATE workflow SET workflow_raw = :workflow_raw WHERE id = :id"
+ ),
+ updates
+ )
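
One detail worth noting in this migration: the new column is `nullable=False`, which only succeeds on a populated table because a `server_default` is supplied. A minimal sketch of the column definition in isolation:

```python
# Adding a NOT NULL column to a table that already has rows requires a
# server-side default so the database can backfill existing rows at DDL time.
import sqlalchemy as sa

is_disabled_column = sa.Column(
    "is_disabled",
    sa.Boolean(),
    nullable=False,
    server_default=sa.false(),  # existing rows get FALSE
)
```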
diff --git a/keep/api/models/db/migrations/versions/2024-09-04-09-38_b30d2141e1cb.py b/keep/api/models/db/migrations/versions/2024-09-04-09-38_b30d2141e1cb.py
new file mode 100644
index 000000000..efe165a13
--- /dev/null
+++ b/keep/api/models/db/migrations/versions/2024-09-04-09-38_b30d2141e1cb.py
@@ -0,0 +1,21 @@
+"""Merge migrations to resolve double-headed issue
+
+Revision ID: b30d2141e1cb
+Revises: 7ed12220a0d3, 49e7c02579db
+Create Date: 2024-09-04 09:38:33.869973
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = "b30d2141e1cb"
+down_revision = ("7ed12220a0d3", "49e7c02579db")
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ pass
+
+
+def downgrade() -> None:
+ pass
diff --git a/keep/api/models/db/migrations/versions/2024-09-10-17-59_710b4ff1d19e.py b/keep/api/models/db/migrations/versions/2024-09-10-17-59_710b4ff1d19e.py
new file mode 100644
index 000000000..c2a93173d
--- /dev/null
+++ b/keep/api/models/db/migrations/versions/2024-09-10-17-59_710b4ff1d19e.py
@@ -0,0 +1,21 @@
+"""empty message
+
+Revision ID: 710b4ff1d19e
+Revises: 1aacee84447e, b30d2141e1cb
+Create Date: 2024-09-10 17:59:56.210094
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = "710b4ff1d19e"
+down_revision = ("1aacee84447e", "b30d2141e1cb")
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ pass
+
+
+def downgrade() -> None:
+ pass
diff --git a/keep/api/models/db/workflow.py b/keep/api/models/db/workflow.py
index c03f56ab7..f2c575863 100644
--- a/keep/api/models/db/workflow.py
+++ b/keep/api/models/db/workflow.py
@@ -16,6 +16,7 @@ class Workflow(SQLModel, table=True):
interval: Optional[int]
workflow_raw: str = Field(sa_column=Column(TEXT))
is_deleted: bool = Field(default=False)
+ is_disabled: bool = Field(default=False)
revision: int = Field(default=1, nullable=False)
last_updated: datetime = Field(default_factory=datetime.utcnow)
diff --git a/keep/api/models/workflow.py b/keep/api/models/workflow.py
index 743a2efcc..8c6b75314 100644
--- a/keep/api/models/workflow.py
+++ b/keep/api/models/workflow.py
@@ -29,6 +29,7 @@ class WorkflowDTO(BaseModel):
creation_time: datetime
triggers: List[dict] = None
interval: int
+ disabled:bool
last_execution_time: datetime = None
last_execution_status: str = None
providers: List[ProviderDTO]
@@ -66,9 +67,10 @@ def manipulate_raw(cls, raw, values):
ordered_raw["id"] = d.get("id")
values["workflow_raw_id"] = d.get("id")
ordered_raw["description"] = d.get("description")
+ ordered_raw["disabled"] = d.get("disabled")
ordered_raw["triggers"] = d.get("triggers")
for key, val in d.items():
- if key not in ["id", "description", "triggers", "steps", "actions"]:
+ if key not in ["id", "description", "disabled", "triggers", "steps", "actions"]:
ordered_raw[key] = val
# than steps and actions
ordered_raw["steps"] = d.get("steps")
diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py
index 8f89dbe3a..173341d85 100644
--- a/keep/api/routes/workflows.py
+++ b/keep/api/routes/workflows.py
@@ -27,7 +27,7 @@
get_workflow,
)
from keep.api.core.db import get_workflow_executions as get_workflow_executions_db
-from keep.api.core.db import get_workflow_id_by_name
+from keep.api.core.db import get_workflow_by_name
from keep.api.models.alert import AlertDto
from keep.api.models.workflow import (
ProviderDTO,
@@ -215,7 +215,7 @@ def run_workflow(
# if the workflow id is the name of the workflow (e.g. the CLI has only the name)
if not validators.uuid(workflow_id):
logger.info("Workflow ID is not a UUID, trying to get the ID by name")
- workflow_id = get_workflow_id_by_name(tenant_id, workflow_id)
+ workflow_id = getattr(get_workflow_by_name(tenant_id, workflow_id), 'id', None)
workflowmanager = WorkflowManager.get_instance()
# Finally, run it
diff --git a/keep/parser/parser.py b/keep/parser/parser.py
index 1398261fc..33ae67c7e 100644
--- a/keep/parser/parser.py
+++ b/keep/parser/parser.py
@@ -150,6 +150,7 @@ def _parse_workflow(
tenant_id, context_manager, workflow, actions_file, workflow_actions
)
workflow_id = self._parse_id(workflow)
+ workflow_disabled = self.__class__.parse_disabled(workflow)
workflow_owners = self._parse_owners(workflow)
workflow_tags = self._parse_tags(workflow)
workflow_steps = self._parse_steps(context_manager, workflow)
@@ -168,6 +169,7 @@ def _parse_workflow(
workflow = Workflow(
workflow_id=workflow_id,
workflow_description=workflow.get("description"),
+ workflow_disabled=workflow_disabled,
workflow_owners=workflow_owners,
workflow_tags=workflow_tags,
workflow_interval=workflow_interval,
@@ -323,6 +325,11 @@ def parse_interval(self, workflow) -> int:
workflow_interval = trigger.get("value", 0)
return workflow_interval
+ @staticmethod
+ def parse_disabled(workflow_dict: dict) -> bool:
+ workflow_is_disabled_in_yml = workflow_dict.get("disabled")
+ return True if (workflow_is_disabled_in_yml == "true" or workflow_is_disabled_in_yml is True) else False
+
@staticmethod
def parse_provider_parameters(provider_parameters: dict) -> dict:
parsed_provider_parameters = {}
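
`parse_disabled` treats only the boolean `True` or the exact string `"true"` as disabled; anything else, including a missing key, leaves the workflow enabled. A quick truth-table sketch:

```python
from keep.parser.parser import Parser

assert Parser.parse_disabled({"disabled": True}) is True
assert Parser.parse_disabled({"disabled": "true"}) is True
assert Parser.parse_disabled({"disabled": "True"}) is False  # case-sensitive
assert Parser.parse_disabled({"disabled": "false"}) is False
assert Parser.parse_disabled({}) is False  # missing key: enabled
```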
diff --git a/keep/workflowmanager/workflow.py b/keep/workflowmanager/workflow.py
index 66299ea1e..f945da2f2 100644
--- a/keep/workflowmanager/workflow.py
+++ b/keep/workflowmanager/workflow.py
@@ -27,6 +27,7 @@ def __init__(
workflow_steps: typing.List[Step],
workflow_actions: typing.List[Step],
workflow_description: str = None,
+ workflow_disabled:bool = False,
workflow_providers: typing.List[dict] = None,
workflow_providers_type: typing.List[str] = [],
workflow_strategy: WorkflowStrategy = WorkflowStrategy.NONPARALLEL_WITH_RETRY.value,
@@ -40,6 +41,7 @@ def __init__(
self.workflow_steps = workflow_steps
self.workflow_actions = workflow_actions
self.workflow_description = workflow_description
+ self.workflow_disabled = workflow_disabled
self.workflow_providers = workflow_providers
self.workflow_providers_type = workflow_providers_type
self.workflow_strategy = workflow_strategy
@@ -87,6 +89,9 @@ def run_actions(self):
return actions_firing, actions_errors
def run(self, workflow_execution_id):
+ if self.workflow_disabled:
+ self.logger.info(f"Skipping disabled workflow {self.workflow_id}")
+ return
self.logger.info(f"Running workflow {self.workflow_id}")
self.context_manager.set_execution_context(workflow_execution_id)
try:
diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py
index 8e49ad386..d50ab8939 100644
--- a/keep/workflowmanager/workflowmanager.py
+++ b/keep/workflowmanager/workflowmanager.py
@@ -79,6 +79,12 @@ def insert_events(self, tenant_id, events: typing.List[AlertDto]):
},
)
for workflow_model in all_workflow_models:
+ if workflow_model.is_disabled:
+ self.logger.debug(
+ f"Skipping the workflow: id={workflow_model.id}, name={workflow_model.name}, "
+ f"tenant_id={workflow_model.tenant_id} - Workflow is disabled."
+ )
+ continue
try:
# get the actual workflow that can be triggered
self.logger.info("Getting workflow from store")
diff --git a/keep/workflowmanager/workflowstore.py b/keep/workflowmanager/workflowstore.py
index 231627625..7eda1943c 100644
--- a/keep/workflowmanager/workflowstore.py
+++ b/keep/workflowmanager/workflowstore.py
@@ -42,6 +42,7 @@ def create_workflow(self, tenant_id: str, created_by, workflow: dict):
workflow["name"] = workflow_name
else:
workflow_name = workflow.get("name")
+
workflow = add_or_update_workflow(
id=str(uuid.uuid4()),
name=workflow_name,
@@ -49,6 +50,7 @@ def create_workflow(self, tenant_id: str, created_by, workflow: dict):
description=workflow.get("description"),
created_by=created_by,
interval=interval,
+ is_disabled=Parser.parse_disabled(workflow),
workflow_raw=yaml.dump(workflow),
)
self.logger.info(f"Workflow {workflow_id} created successfully")
diff --git a/tests/test_workflow_execution.py b/tests/test_workflow_execution.py
index 67b8014ed..b772391fd 100644
--- a/tests/test_workflow_execution.py
+++ b/tests/test_workflow_execution.py
@@ -475,3 +475,103 @@ def test_workflow_execution3(
elif expected_tier == 1:
assert workflow_execution.results["send-slack-message-tier-0"] == []
assert "Tier 1" in workflow_execution.results["send-slack-message-tier-1"][0]
+
+
+
+workflow_definition_for_enabled_disabled = """workflow:
+id: %s
+description: Handle alerts based on startedAt timestamp
+triggers:
+- type: alert
+ filters:
+ - key: name
+ value: "server-is-down"
+actions:
+- name: send-slack-message-tier-0
+ if: keep.get_firing_time('{{ alert }}', 'minutes') > 0 and keep.get_firing_time('{{ alert }}', 'minutes') < 10
+ provider:
+ type: console
+ with:
+ message: |
+ "Tier 0 Alert: {{ alert.name }} - {{ alert.description }}
+ Alert details: {{ alert }}"
+- name: send-slack-message-tier-1
+ if: "keep.get_firing_time('{{ alert }}', 'minutes') >= 10 and keep.get_firing_time('{{ alert }}', 'minutes') < 30"
+ provider:
+ type: console
+ with:
+ message: |
+ "Tier 1 Alert: {{ alert.name }} - {{ alert.description }}
+ Alert details: {{ alert }}"
+"""
+
+
+def test_workflow_execution_with_disabled_workflow(
+ db_session,
+ create_alert,
+ workflow_manager,
+):
+ enabled_id = "enabled-workflow"
+ enabled_workflow = Workflow(
+ id=enabled_id,
+ name="enabled-workflow",
+ tenant_id=SINGLE_TENANT_UUID,
+ description="This workflow is enabled and should be executed",
+ created_by="test@keephq.dev",
+ interval=0,
+ is_disabled=False,
+ workflow_raw=workflow_definition_for_enabled_disabled % enabled_id
+ )
+
+ disabled_id = "disabled-workflow"
+ disabled_workflow = Workflow(
+ id=disabled_id,
+ name="disabled-workflow",
+ tenant_id=SINGLE_TENANT_UUID,
+ description="This workflow is disabled and should not be executed",
+ created_by="test@keephq.dev",
+ interval=0,
+ is_disabled=True,
+ workflow_raw=workflow_definition_for_enabled_disabled % disabled_id
+ )
+
+ db_session.add(enabled_workflow)
+ db_session.add(disabled_workflow)
+ db_session.commit()
+
+ base_time = datetime.now(tz=pytz.utc)
+
+ create_alert("fp1", AlertStatus.FIRING, base_time)
+ current_alert = AlertDto(
+ id="grafana-1",
+ source=["grafana"],
+ name="server-is-down",
+ status=AlertStatus.FIRING,
+ severity="critical",
+ fingerprint="fp1",
+ )
+
+ # Sleep one second to avoid the case where tier0 alerts are not triggered
+ time.sleep(1)
+
+ workflow_manager.insert_events(SINGLE_TENANT_UUID, [current_alert])
+
+ enabled_workflow_execution = None
+ disabled_workflow_execution = None
+ count = 0
+
+ while (enabled_workflow_execution is None and disabled_workflow_execution is None) and count < 30:
+ enabled_workflow_execution = get_last_workflow_execution_by_workflow_id(
+ SINGLE_TENANT_UUID, enabled_id
+ )
+ disabled_workflow_execution = get_last_workflow_execution_by_workflow_id(
+ SINGLE_TENANT_UUID, disabled_id
+ )
+
+ time.sleep(1)
+ count += 1
+
+ assert enabled_workflow_execution is not None
+ assert enabled_workflow_execution.status == "success"
+
+ assert disabled_workflow_execution is None
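
The test polls because workflow execution runs asynchronously after `insert_events`. The loop above is the inline form of a small wait-for helper; a sketch (the helper is illustrative, not part of the test suite):

```python
import time


def wait_for(fetch, attempts=30, delay=1.0):
    """Call fetch() up to `attempts` times, sleeping `delay` seconds between
    tries; return the first non-None result, or None on timeout."""
    for _ in range(attempts):
        result = fetch()
        if result is not None:
            return result
        time.sleep(delay)
    return None
```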
From 1cf640ef686c160146f80c2377dac94d75e6f23c Mon Sep 17 00:00:00 2001
From: Shahar Glazner
Date: Wed, 11 Sep 2024 11:44:57 +0300
Subject: [PATCH 04/10] fix: disabled (#1897)
---
keep/api/routes/workflows.py | 25 +++++++++++++++++++------
1 file changed, 19 insertions(+), 6 deletions(-)
diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py
index 173341d85..0a4072b6f 100644
--- a/keep/api/routes/workflows.py
+++ b/keep/api/routes/workflows.py
@@ -25,9 +25,9 @@
get_last_workflow_workflow_to_alert_executions,
get_session,
get_workflow,
+ get_workflow_by_name,
)
from keep.api.core.db import get_workflow_executions as get_workflow_executions_db
-from keep.api.core.db import get_workflow_by_name
from keep.api.models.alert import AlertDto
from keep.api.models.workflow import (
ProviderDTO,
@@ -37,13 +37,13 @@
WorkflowExecutionLogsDTO,
WorkflowToAlertExecutionDTO,
)
+from keep.api.utils.pagination import WorkflowExecutionsPaginatedResultsDto
from keep.identitymanager.authenticatedentity import AuthenticatedEntity
from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory
from keep.parser.parser import Parser
from keep.providers.providers_factory import ProvidersFactory
from keep.workflowmanager.workflowmanager import WorkflowManager
from keep.workflowmanager.workflowstore import WorkflowStore
-from keep.api.utils.pagination import WorkflowExecutionsPaginatedResultsDto
router = APIRouter()
logger = logging.getLogger(__name__)
@@ -177,6 +177,7 @@ def get_workflows(
last_updated=workflow.last_updated,
last_executions=last_executions,
last_execution_started=last_execution_started,
+ disabled=workflow.disabled,
)
workflows_dto.append(workflow_dto)
return workflows_dto
@@ -215,7 +216,7 @@ def run_workflow(
# if the workflow id is the name of the workflow (e.g. the CLI has only the name)
if not validators.uuid(workflow_id):
logger.info("Workflow ID is not a UUID, trying to get the ID by name")
- workflow_id = getattr(get_workflow_by_name(tenant_id, workflow_id), 'id', None)
+ workflow_id = getattr(get_workflow_by_name(tenant_id, workflow_id), "id", None)
workflowmanager = WorkflowManager.get_instance()
# Finally, run it
@@ -550,7 +551,18 @@ def get_workflow_by_id(
workflow = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
with tracer.start_as_current_span("get_workflow_executions"):
- total_count, workflow_executions, pass_count, fail_count, avgDuration = get_workflow_executions_db(tenant_id, workflow_id, limit, offset, tab, status, trigger, execution_id)
+ total_count, workflow_executions, pass_count, fail_count, avgDuration = (
+ get_workflow_executions_db(
+ tenant_id,
+ workflow_id,
+ limit,
+ offset,
+ tab,
+ status,
+ trigger,
+ execution_id,
+ )
+ )
workflow_executions_dtos = []
with tracer.start_as_current_span("create_workflow_dtos"):
for workflow_execution in workflow_executions:
@@ -566,16 +578,17 @@ def get_workflow_by_id(
workflow_executions_dtos.append(workflow_execution_dto)
return WorkflowExecutionsPaginatedResultsDto(
- limit=limit,
+ limit=limit,
offset=offset,
count=total_count,
items=workflow_executions_dtos,
passCount=pass_count,
failCount=fail_count,
avgDuration=avgDuration,
- workflow=workflow
+ workflow=workflow,
)
+
@router.delete("/{workflow_id}", description="Delete workflow")
def delete_workflow_by_id(
workflow_id: str,
From 6bee31bd10e0e4e6e5396bbf4b568258391f694a Mon Sep 17 00:00:00 2001
From: Shahar Glazner
Date: Wed, 11 Sep 2024 11:59:40 +0300
Subject: [PATCH 05/10] fix: workflow is_disabled (#1900)
---
keep/api/routes/workflows.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py
index 0a4072b6f..3838b12b7 100644
--- a/keep/api/routes/workflows.py
+++ b/keep/api/routes/workflows.py
@@ -177,7 +177,7 @@ def get_workflows(
last_updated=workflow.last_updated,
last_executions=last_executions,
last_execution_started=last_execution_started,
- disabled=workflow.disabled,
+ disabled=workflow.is_disabled,
)
workflows_dto.append(workflow_dto)
return workflows_dto
From 1f4eba8daeda36db7c10feebe2da37c7fe5eb7ff Mon Sep 17 00:00:00 2001
From: Rajesh Jonnalagadda <38752904+rajeshj11@users.noreply.github.com>
Date: Wed, 11 Sep 2024 19:27:49 +0530
Subject: [PATCH 06/10] fix: use custom workflowRun hook to run the workflow at
 all places (#1890)
Signed-off-by: Rajesh Jonnalagadda <38752904+rajeshj11@users.noreply.github.com>
---
.../workflows/[workflow_id]/executions.tsx | 47 ++--
keep-ui/app/workflows/mockworkflows.tsx | 4 +-
keep-ui/app/workflows/workflow-menu.tsx | 25 +-
keep-ui/app/workflows/workflow-tile.tsx | 243 +++---------------
keep-ui/utils/hooks/useWorkflowRun.ts | 190 ++++++++++----
keep/api/models/workflow.py | 2 +-
keep/api/routes/workflows.py | 94 +++----
keep/api/utils/pagination.py | 3 +-
keep/workflowmanager/workflowstore.py | 64 +++++
9 files changed, 313 insertions(+), 359 deletions(-)
diff --git a/keep-ui/app/workflows/[workflow_id]/executions.tsx b/keep-ui/app/workflows/[workflow_id]/executions.tsx
index 9d134d50a..d2b844251 100644
--- a/keep-ui/app/workflows/[workflow_id]/executions.tsx
+++ b/keep-ui/app/workflows/[workflow_id]/executions.tsx
@@ -27,6 +27,7 @@ import { useWorkflowRun } from "utils/hooks/useWorkflowRun";
import BuilderWorkflowTestRunModalContent from "../builder/builder-workflow-testrun-modal";
import Modal from "react-modal";
import { TableFilters } from "./table-filters";
+import AlertTriggerModal from "../workflow-run-with-alert-modal";
const tabs = [
{ name: "All Time", value: 'alltime' },
@@ -67,10 +68,10 @@ export const FilterTabs = ({
export function StatsCard({ children, data }: { children: any, data?: string }) {
return
- {!!data &&
- {data}
-
}
- {children}
+ {!!data &&
+ {data}
+
}
+ {children}
}
@@ -109,11 +110,12 @@ export default function WorkflowDetailPage({
} = useWorkflowExecutionsV2(params.workflow_id, tab, executionPagination.limit, executionPagination.offset);
const {
- loading,
- runModalOpen,
- setRunModalOpen,
- runningWorkflowExecution,
- setRunningWorkflowExecution } = useWorkflowRun(data?.workflow?.workflow_raw!)
+ isRunning,
+ handleRunClick,
+ getTriggerModalProps,
+ isRunButtonDisabled,
+ message,
+ } = useWorkflowRun(data?.workflow!)
if (isLoading || !data) return ;
@@ -144,7 +146,7 @@ export default function WorkflowDetailPage({
} else if (num >= 1_000) {
return `${(num / 1_000).toFixed(1)}k`;
} else {
- return num.toString();
+ return num?.toString() ?? "";
}
};
@@ -159,7 +161,12 @@ export default function WorkflowDetailPage({
{/*TO DO update searchParams for these filters*/}
-
+ {!!data.workflow && }
{data?.items && (
@@ -170,16 +177,14 @@ export default function WorkflowDetailPage({
{formatNumber(data.count ?? 0)}
- {/*
__ from last month
*/}
-
+
Pass / Fail ratio
{formatNumber(data.passCount)}{'/'}{formatNumber(data.failCount)}
- {/*
__ from last month
*/}
@@ -189,7 +194,6 @@ export default function WorkflowDetailPage({
{(data.count ? (data.passCount / data.count) * 100 : 0).toFixed(2)}{"%"}
- {/*
__ from last month
*/}
@@ -199,7 +203,6 @@ export default function WorkflowDetailPage({
{(data.avgDuration ?? 0).toFixed(2)}
- {/*
__ from last month
*/}
@@ -221,16 +224,8 @@ export default function WorkflowDetailPage({
)}
-      <Modal
-        isOpen={runModalOpen}
-        onRequestClose={() => { setRunModalOpen(false); setRunningWorkflowExecution(null) }}
-        className="bg-gray-50 p-4 md:p-10 mx-auto max-w-7xl mt-20 border border-orange-600/50 rounded-md"
-      >
-        <BuilderWorkflowTestRunModalContent
-          closeModal={() => { setRunModalOpen(false); setRunningWorkflowExecution(null) }}
-          workflowExecution={runningWorkflowExecution}
-        />
-      </Modal>
+      {!!data.workflow && !!getTriggerModalProps && <AlertTriggerModal {...getTriggerModalProps()} />}
>
);
}
diff --git a/keep-ui/app/workflows/mockworkflows.tsx b/keep-ui/app/workflows/mockworkflows.tsx
index 2ca2a81ca..0ea737931 100644
--- a/keep-ui/app/workflows/mockworkflows.tsx
+++ b/keep-ui/app/workflows/mockworkflows.tsx
@@ -24,7 +24,7 @@ export function WorkflowSteps({ workflow }: { workflow: MockWorkflow }) {
return (
<>
{provider && (
-
+
{index > 0 && (
)}
@@ -48,7 +48,7 @@ export function WorkflowSteps({ workflow }: { workflow: MockWorkflow }) {
return (
<>
{provider && (
-
+
{(index > 0 || isStepPresent) && (
)}
diff --git a/keep-ui/app/workflows/workflow-menu.tsx b/keep-ui/app/workflows/workflow-menu.tsx
index 732ff3d68..d13e85aa7 100644
--- a/keep-ui/app/workflows/workflow-menu.tsx
+++ b/keep-ui/app/workflows/workflow-menu.tsx
@@ -11,10 +11,8 @@ interface WorkflowMenuProps {
onView?: () => void;
onDownload?: () => void;
onBuilder?: () => void;
- allProvidersInstalled: boolean;
- hasManualTrigger: boolean;
- hasAlertTrigger: boolean;
- isWorkflowDisabled:boolean
+ isRunButtonDisabled: boolean;
+ runButtonToolTip?: string;
}
@@ -24,24 +22,13 @@ export default function WorkflowMenu({
onView,
onDownload,
onBuilder,
- allProvidersInstalled,
- hasManualTrigger,
- hasAlertTrigger,
- isWorkflowDisabled,
+ isRunButtonDisabled,
+ runButtonToolTip,
}: WorkflowMenuProps) {
- const getDisabledTooltip = () => {
- if (!allProvidersInstalled) return "Not all providers are installed.";
- if (!hasManualTrigger) return "No manual trigger available.";
- if (isWorkflowDisabled) return "Workflow is disabled";
- return "";
- };
  const stopPropagation = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation();
};
- const isRunButtonDisabled = !allProvidersInstalled || (!hasManualTrigger && !hasAlertTrigger) || isWorkflowDisabled;
-
-
return (
diff --git a/keep-ui/app/workflows/workflow-tile.tsx b/keep-ui/app/workflows/workflow-tile.tsx
index 28307ff5d..c462454d1 100644
--- a/keep-ui/app/workflows/workflow-tile.tsx
+++ b/keep-ui/app/workflows/workflow-tile.tsx
@@ -40,6 +40,7 @@ import {
MdOutlineKeyboardArrowLeft,
} from "react-icons/md";
import { HiBellAlert } from "react-icons/hi2";
+import { useWorkflowRun } from "utils/hooks/useWorkflowRun";
function WorkflowMenuSection({
onDelete,
@@ -47,28 +48,18 @@ function WorkflowMenuSection({
onDownload,
onView,
onBuilder,
- workflow,
+ isRunButtonDisabled,
+ runButtonToolTip,
}: {
  onDelete: () => Promise<void>;
  onRun: () => Promise<void>;
onDownload: () => void;
onView: () => void;
onBuilder: () => void;
- workflow: Workflow;
+ isRunButtonDisabled: boolean;
+ runButtonToolTip?: string;
}) {
// Determine if all providers are installed
- const allProvidersInstalled = workflow.providers.every(
- (provider) => provider.installed
- );
-
- // Check if there is a manual trigger
- const hasManualTrigger = workflow.triggers.some(
- (trigger) => trigger.type === "manual"
- ); // Replace 'manual' with the actual value that represents a manual trigger in your data
-
- const hasAlertTrigger = workflow.triggers.some(
- (trigger) => trigger.type === "alert"
- );
return (
);
}
@@ -282,11 +271,7 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
);
const [formValues, setFormValues] = useState<{ [key: string]: string }>({});
const [formErrors, setFormErrors] = useState<{ [key: string]: string }>({});
- const [isRunning, setIsRunning] = useState(false);
- const [isAlertTriggerModalOpen, setIsAlertTriggerModalOpen] = useState(false);
- const [alertFilters, setAlertFilters] = useState([]);
- const [alertDependencies, setAlertDependencies] = useState([]);
const [openTriggerModal, setOpenTriggerModal] = useState(false);
const alertSource = workflow?.triggers
?.find((w) => w.type === "alert")
@@ -294,6 +279,13 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
const [fallBackIcon, setFallBackIcon] = useState(false);
const { providers } = useFetchProviders();
+ const {
+ isRunning,
+ handleRunClick,
+ getTriggerModalProps,
+ isRunButtonDisabled,
+ message,
+ } = useWorkflowRun(workflow!);
const handleConnectProvider = (provider: FullProvider) => {
setSelectedProvider(provider);
@@ -317,89 +309,6 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
setFormErrors(updatedFormErrors);
};
- // todo: this logic should move to the backend
- function extractAlertDependencies(workflowRaw: string): string[] {
-    const dependencyRegex = /(?<!if:.*)(\{\{\s*alert\.[\w.]+\s*\}\})/g;
-    const dependencies = workflowRaw.match(dependencyRegex) ?? [];
-
-    // Use a Set to drop duplicate template references
-    const uniqueDependencies = Array.from(new Set(dependencies)).reduce<string[]>((acc, dep) => {
- // Ensure 'dep' is treated as a string
- const match = dep.match(/alert\.([\w.]+)/);
- if (match) {
- acc.push(match[1]);
- }
- return acc;
- }, []);
-
- return uniqueDependencies;
- }
-
- const runWorkflow = async (payload: object) => {
- try {
- setIsRunning(true);
- const response = await fetch(`${apiUrl}/workflows/${workflow.id}/run`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${session?.accessToken}`,
- "Content-Type": "application/json",
- },
- body: JSON.stringify(payload),
- });
-
- if (response.ok) {
- // Workflow started successfully
- const responseData = await response.json();
- const { workflow_execution_id } = responseData;
- setIsRunning(false);
- router.push(`/workflows/${workflow.id}/runs/${workflow_execution_id}`);
- } else {
- console.error("Failed to start workflow");
- }
- } catch (error) {
- console.error("An error occurred while starting workflow", error);
- }
- setIsRunning(false);
- };
-
- const handleRunClick = async () => {
- const hasAlertTrigger = workflow.triggers.some(
- (trigger) => trigger.type === "alert"
- );
-
-    // if it needs alert payload, then open the modal
- if (hasAlertTrigger) {
- // extract the filters
- // TODO: support more than one trigger
- for (const trigger of workflow.triggers) {
- // at least one trigger is alert, o/w hasAlertTrigger was false
- if (trigger.type === "alert") {
- const staticAlertFilters = trigger.filters || [];
- setAlertFilters(staticAlertFilters);
- break;
- }
- }
- const dependencies = extractAlertDependencies(workflow.workflow_raw);
- setAlertDependencies(dependencies);
- setIsAlertTriggerModalOpen(true);
- return;
- }
- // else, manual trigger, just run it
- else {
- runWorkflow({});
- }
- };
-
- const handleAlertTriggerModalSubmit = (payload: any) => {
- runWorkflow(payload); // Function to run the workflow with the payload
- };
-
const handleDeleteClick = async () => {
try {
const response = await fetch(`${apiUrl}/workflows/${workflow.id}`, {
@@ -649,13 +558,14 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
}}
>
- {WorkflowMenuSection({
+ {!!handleRunClick && WorkflowMenuSection({
onDelete: handleDeleteClick,
onRun: handleRunClick,
onDownload: handleDownloadClick,
onView: handleViewClick,
onBuilder: handleBuilderClick,
- workflow,
+ runButtonToolTip: message,
+ isRunButtonDisabled: !!isRunButtonDisabled,
})}
@@ -765,13 +675,9 @@ function WorkflowTile({ workflow }: { workflow: Workflow }) {
-      <AlertTriggerModal
-        isOpen={isAlertTriggerModalOpen}
-        onClose={() => setIsAlertTriggerModalOpen(false)}
-        onSubmit={handleAlertTriggerModalSubmit}
-        staticFields={alertFilters}
-        dependencies={alertDependencies}
-      />
+      {!!getTriggerModalProps && <AlertTriggerModal {...getTriggerModalProps()} />}
{
@@ -813,13 +719,15 @@ export function WorkflowTileOld({ workflow }: { workflow: Workflow }) {
);
const [formValues, setFormValues] = useState<{ [key: string]: string }>({});
const [formErrors, setFormErrors] = useState<{ [key: string]: string }>({});
- const [isRunning, setIsRunning] = useState(false);
- const [isAlertTriggerModalOpen, setIsAlertTriggerModalOpen] = useState(false);
-
- const [alertFilters, setAlertFilters] = useState([]);
- const [alertDependencies, setAlertDependencies] = useState([]);
const { providers } = useFetchProviders();
+ const {
+ isRunning,
+ handleRunClick,
+ isRunButtonDisabled,
+ message,
+ getTriggerModalProps,
+ } = useWorkflowRun(workflow!);
const handleConnectProvider = (provider: FullProvider) => {
setSelectedProvider(provider);
@@ -843,88 +751,6 @@ export function WorkflowTileOld({ workflow }: { workflow: Workflow }) {
setFormErrors(updatedFormErrors);
};
- // todo: this logic should move to the backend
- function extractAlertDependencies(workflowRaw: string): string[] {
-    const dependencyRegex = /(?<!if:.*)(\{\{\s*alert\.[\w.]+\s*\}\})/g;
-    const dependencies = workflowRaw.match(dependencyRegex) ?? [];
-
-    // Use a Set to drop duplicate template references
-    const uniqueDependencies = Array.from(new Set(dependencies)).reduce<string[]>((acc, dep) => {
- // Ensure 'dep' is treated as a string
- const match = dep.match(/alert\.([\w.]+)/);
- if (match) {
- acc.push(match[1]);
- }
- return acc;
- }, []);
-
- return uniqueDependencies;
- }
-
- const runWorkflow = async (payload: object) => {
- try {
- setIsRunning(true);
- const response = await fetch(`${apiUrl}/workflows/${workflow.id}/run`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${session?.accessToken}`,
- "Content-Type": "application/json",
- },
- body: JSON.stringify(payload),
- });
-
- if (response.ok) {
- // Workflow started successfully
- const responseData = await response.json();
- const { workflow_execution_id } = responseData;
- setIsRunning(false);
- router.push(`/workflows/${workflow.id}/runs/${workflow_execution_id}`);
- } else {
- console.error("Failed to start workflow");
- }
- } catch (error) {
- console.error("An error occurred while starting workflow", error);
- }
- setIsRunning(false);
- };
-
- const handleRunClick = async () => {
- const hasAlertTrigger = workflow.triggers.some(
- (trigger) => trigger.type === "alert"
- );
-
-    // if it needs alert payload, then open the modal
- if (hasAlertTrigger) {
- // extract the filters
- // TODO: support more than one trigger
- for (const trigger of workflow.triggers) {
- // at least one trigger is alert, o/w hasAlertTrigger was false
- if (trigger.type === "alert") {
- const staticAlertFilters = trigger.filters || [];
- setAlertFilters(staticAlertFilters);
- break;
- }
- }
- const dependencies = extractAlertDependencies(workflow.workflow_raw);
- setAlertDependencies(dependencies);
- setIsAlertTriggerModalOpen(true);
- return;
- }
- // else, manual trigger, just run it
- else {
- runWorkflow({});
- }
- };
-
- const handleAlertTriggerModalSubmit = (payload: any) => {
- runWorkflow(payload); // Function to run the workflow with the payload
- };
const handleDeleteClick = async () => {
try {
@@ -1028,13 +854,14 @@ export function WorkflowTileOld({ workflow }: { workflow: Workflow }) {
{workflow.name}
- {WorkflowMenuSection({
+ {!!handleRunClick && WorkflowMenuSection({
onDelete: handleDeleteClick,
onRun: handleRunClick,
onDownload: handleDownloadClick,
onView: handleViewClick,
onBuilder: handleBuilderClick,
- workflow,
+ runButtonToolTip: message,
+ isRunButtonDisabled: !!isRunButtonDisabled,
})}
@@ -1186,13 +1013,9 @@ export function WorkflowTileOld({ workflow }: { workflow: Workflow }) {
)}
-      <AlertTriggerModal
-        isOpen={isAlertTriggerModalOpen}
-        onClose={() => setIsAlertTriggerModalOpen(false)}
-        onSubmit={handleAlertTriggerModalSubmit}
-        staticFields={alertFilters}
-        dependencies={alertDependencies}
-      />
+      {!!getTriggerModalProps && <AlertTriggerModal {...getTriggerModalProps()} />}
);
}
diff --git a/keep-ui/utils/hooks/useWorkflowRun.ts b/keep-ui/utils/hooks/useWorkflowRun.ts
index 4edc9f438..a35ca86bc 100644
--- a/keep-ui/utils/hooks/useWorkflowRun.ts
+++ b/keep-ui/utils/hooks/useWorkflowRun.ts
@@ -1,59 +1,159 @@
-import { WorkflowExecutionFailure, WorkflowExecution } from "app/workflows/builder/types";
+import { useState } from "react";
import { useSession } from "next-auth/react";
-import { useEffect, useState } from "react";
import { getApiURL } from "utils/apiUrl";
+import { useRouter } from "next/navigation";
+import { Filter, Workflow } from "app/workflows/models";
-export const useWorkflowRun = (workflowRaw: string) => {
- const [runningWorkflowExecution, setRunningWorkflowExecution] = useState<
- WorkflowExecution | WorkflowExecutionFailure | null
- >(null);
- const [runModalOpen, setRunModalOpen] = useState(false);
- const [loading, setLoading] = useState(true);
+export const useWorkflowRun = (workflow: Workflow) => {
+
+ const router = useRouter();
+ const [isRunning, setIsRunning] = useState(false);
const { data: session, status, update } = useSession();
const accessToken = session?.accessToken;
+ const [isAlertTriggerModalOpen, setIsAlertTriggerModalOpen] = useState(false);
+ let message = ""
+ const [alertFilters, setAlertFilters] = useState
([]);
+ const [alertDependencies, setAlertDependencies] = useState([]);
const apiUrl = getApiURL();
- const url = `${apiUrl}/workflows/test`;
- const method = "POST";
- const headers = {
- "Content-Type": "text/html",
- Authorization: `Bearer ${accessToken}`,
+
+ if (!workflow) {
+ return {};
+ }
+ const allProvidersInstalled = workflow?.providers?.every(
+ (provider) => provider.installed
+ );
+
+ // Check if there is a manual trigger
+ const hasManualTrigger = workflow?.triggers?.some(
+ (trigger) => trigger.type === "manual"
+ ); // Replace 'manual' with the actual value that represents a manual trigger in your data
+
+ const hasAlertTrigger = workflow?.triggers?.some(
+ (trigger) => trigger.type === "alert"
+ );
+
+  const isWorkflowDisabled = !!workflow?.disabled;
+
+  const getDisabledTooltip = () => {
+    if (!allProvidersInstalled) return "Not all providers are installed.";
+    if (!hasManualTrigger) return "No manual trigger available.";
+    if (isWorkflowDisabled) return "Workflow is disabled";
+    return message;
+  };
+
+ const isRunButtonDisabled = isWorkflowDisabled || !allProvidersInstalled || (!hasManualTrigger && !hasAlertTrigger);
+
+ if (isRunButtonDisabled) {
+ message = getDisabledTooltip();
+ }
+ function extractAlertDependencies(workflowRaw: string): string[] {
+    const dependencyRegex = /(?<!if:.*)(\{\{\s*alert\.[\w.]+\s*\}\})/g;
+    const dependencies = workflowRaw.match(dependencyRegex) ?? [];
+
+    // Use a Set to drop duplicate template references
+    const uniqueDependencies = Array.from(new Set(dependencies)).reduce<string[]>((acc, dep) => {
+ // Ensure 'dep' is treated as a string
+ const match = dep.match(/alert\.([\w.]+)/);
+ if (match) {
+ acc.push(match[1]);
+ }
+ return acc;
+ }, []);
+
+ return uniqueDependencies;
+ }
+
+ const runWorkflow = async (payload: object) => {
+ try {
+ if (!workflow) {
+ return;
+ }
+ setIsRunning(true);
+ const response = await fetch(`${apiUrl}/workflows/${workflow?.id}/run`, {
+ method: "POST",
+ headers: {
+ Authorization: `Bearer ${accessToken}`,
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(payload),
+ });
+
+ if (response.ok) {
+ // Workflow started successfully
+ const responseData = await response.json();
+ const { workflow_execution_id } = responseData;
+ router.push(`/workflows/${workflow?.id}/runs/${workflow_execution_id}`);
+ } else {
+ console.error("Failed to start workflow");
+ }
+ } catch (error) {
+ console.error("An error occurred while starting workflow", error);
+ } finally {
+ setIsRunning(false);
+ }
};
- useEffect(() => {
- if (runModalOpen) {
- const body = workflowRaw;
- setLoading(true);
- fetch(url, { method, headers, body })
- .then((response) => {
- if (response.ok) {
- response.json().then((data) => {
- setRunningWorkflowExecution({
- ...data,
- });
- });
- } else {
- response.json().then((data) => {
- setRunningWorkflowExecution({
- error: data?.detail ?? "Unknown error",
- });
- });
- }
- })
- .catch((error) => {
- alert(`Error: ${error}`);
- setRunModalOpen(false);
- }).finally(()=>{
- setLoading(false);
- })
+ const handleRunClick = async () => {
+ if (!workflow) {
+ return;
+ }
+ const hasAlertTrigger = workflow?.triggers?.some(
+ (trigger) => trigger.type === "alert"
+ );
+
+    // if it needs alert payload, then open the modal
+ if (hasAlertTrigger) {
+ // extract the filters
+ // TODO: support more than one trigger
+ for (const trigger of workflow?.triggers) {
+ // at least one trigger is alert, o/w hasAlertTrigger was false
+ if (trigger.type === "alert") {
+ const staticAlertFilters = trigger.filters || [];
+ setAlertFilters(staticAlertFilters);
+ break;
+ }
+ }
+ const dependencies = extractAlertDependencies(workflow?.workflow_raw);
+ setAlertDependencies(dependencies);
+ setIsAlertTriggerModalOpen(true);
+ return;
+ }
+ // else, manual trigger, just run it
+ else {
+ runWorkflow({});
}
- }, [workflowRaw, runModalOpen])
+ };
+
+ const handleAlertTriggerModalSubmit = (payload: any) => {
+ runWorkflow(payload); // Function to run the workflow with the payload
+ };
+
+
+ const getTriggerModalProps = () => {
+ return {
+ isOpen: isAlertTriggerModalOpen,
+ onClose: () => setIsAlertTriggerModalOpen(false),
+ onSubmit: handleAlertTriggerModalSubmit,
+ staticFields: alertFilters,
+ dependencies: alertDependencies
+ }
+ }
return {
- loading,
- runModalOpen,
- setRunModalOpen,
- runningWorkflowExecution,
- setRunningWorkflowExecution,
+ handleRunClick,
+ isRunning,
+ getTriggerModalProps,
+ isRunButtonDisabled,
+ message
}
};
diff --git a/keep/api/models/workflow.py b/keep/api/models/workflow.py
index 8c6b75314..26d28f2e1 100644
--- a/keep/api/models/workflow.py
+++ b/keep/api/models/workflow.py
@@ -29,7 +29,7 @@ class WorkflowDTO(BaseModel):
creation_time: datetime
triggers: List[dict] = None
interval: int
- disabled:bool
+ disabled: bool = False
last_execution_time: datetime = None
last_execution_status: str = None
providers: List[ProviderDTO]
diff --git a/keep/api/routes/workflows.py b/keep/api/routes/workflows.py
index 3838b12b7..47d44f6ae 100644
--- a/keep/api/routes/workflows.py
+++ b/keep/api/routes/workflows.py
@@ -30,7 +30,6 @@
from keep.api.core.db import get_workflow_executions as get_workflow_executions_db
from keep.api.models.alert import AlertDto
from keep.api.models.workflow import (
- ProviderDTO,
WorkflowCreateOrUpdateDTO,
WorkflowDTO,
WorkflowExecutionDTO,
@@ -41,7 +40,6 @@
from keep.identitymanager.authenticatedentity import AuthenticatedEntity
from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory
from keep.parser.parser import Parser
-from keep.providers.providers_factory import ProvidersFactory
from keep.workflowmanager.workflowmanager import WorkflowManager
from keep.workflowmanager.workflowstore import WorkflowStore
@@ -72,7 +70,6 @@ def get_workflows(
) -> list[WorkflowDTO] | list[dict]:
tenant_id = authenticated_entity.tenant_id
workflowstore = WorkflowStore()
- parser = Parser()
workflows_dto = []
installed_providers = get_installed_providers(tenant_id)
installed_providers_by_type = {}
@@ -109,57 +106,12 @@ def get_workflows(
last_execution_started = None
try:
- workflow_yaml = yaml.safe_load(workflow.workflow_raw)
- providers = parser.get_providers_from_workflow(workflow_yaml)
- except Exception:
- logger.exception("Failed to parse workflow", extra={"workflow": workflow})
- continue
- providers_dto = []
- # get the provider details
- for provider in providers:
- try:
- provider = installed_providers_by_type[provider.get("type")][
- provider.get("name")
- ]
- provider_dto = ProviderDTO(
- name=provider.name,
- type=provider.type,
- id=provider.id,
- installed=True,
- )
- providers_dto.append(provider_dto)
- except KeyError:
- # the provider is not installed, now we want to check:
-                # 1. if the provider requires any config - so it's not installed
- # 2. if the provider does not require any config - consider it as installed
- try:
- conf = ProvidersFactory.get_provider_required_config(
- provider.get("type")
- )
- except ModuleNotFoundError:
- logger.warning(
- "Someone tried to use a non-existing provider in a workflow",
- extra={"provider": provider.get("type")},
- )
- conf = None
- if conf:
- provider_dto = ProviderDTO(
- name=provider.get("name"),
- type=provider.get("type"),
- id=None,
- installed=False,
- )
- # if the provider does not require any config, consider it as installed
- else:
- provider_dto = ProviderDTO(
- name=provider.get("name"),
- type=provider.get("type"),
- id=None,
- installed=True,
- )
- providers_dto.append(provider_dto)
-
- triggers = parser.get_triggers_from_workflow(workflow_yaml)
+            providers_dto, triggers = workflowstore.get_workflow_meta_data(
+                tenant_id=tenant_id,
+                workflow=workflow,
+                installed_providers_by_type=installed_providers_by_type,
+            )
+        except Exception as e:
+            logger.error(f"Error fetching workflow metadata: {e}")
+            providers_dto, triggers = [], []  # default in case of failure
+
# create the workflow DTO
workflow_dto = WorkflowDTO(
id=workflow.id,
@@ -549,6 +501,17 @@ def get_workflow_by_id(
) -> WorkflowExecutionsPaginatedResultsDto:
tenant_id = authenticated_entity.tenant_id
workflow = get_workflow(tenant_id=tenant_id, workflow_id=workflow_id)
+ installed_providers = get_installed_providers(tenant_id)
+ installed_providers_by_type = {}
+ for installed_provider in installed_providers:
+ if installed_provider.type not in installed_providers_by_type:
+ installed_providers_by_type[installed_provider.type] = {
+ installed_provider.name: installed_provider
+ }
+ else:
+ installed_providers_by_type[installed_provider.type][
+ installed_provider.name
+ ] = installed_provider
with tracer.start_as_current_span("get_workflow_executions"):
total_count, workflow_executions, pass_count, fail_count, avgDuration = (
@@ -577,6 +540,27 @@ def get_workflow_by_id(
}
workflow_executions_dtos.append(workflow_execution_dto)
+ workflowstore = WorkflowStore()
+ try:
+        providers_dto, triggers = workflowstore.get_workflow_meta_data(
+            tenant_id=tenant_id,
+            workflow=workflow,
+            installed_providers_by_type=installed_providers_by_type,
+        )
+    except Exception as e:
+        logger.error(f"Error fetching workflow metadata: {e}")
+        providers_dto, triggers = [], []  # default in case of failure
+
+ final_workflow = WorkflowDTO(
+ id=workflow.id,
+ name=workflow.name,
+ description=workflow.description or "[This workflow has no description]",
+ created_by=workflow.created_by,
+ creation_time=workflow.creation_time,
+ interval=workflow.interval,
+ providers=providers_dto,
+ triggers=triggers,
+ workflow_raw=workflow.workflow_raw,
+ last_updated=workflow.last_updated,
+ disabled=workflow.is_disabled,
+ )
return WorkflowExecutionsPaginatedResultsDto(
limit=limit,
offset=offset,
@@ -585,7 +569,7 @@ def get_workflow_by_id(
passCount=pass_count,
failCount=fail_count,
avgDuration=avgDuration,
- workflow=workflow,
+        workflow=final_workflow,
)
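
Note: the installed_providers_by_type index built above is a two-level dict,
keyed first by provider type and then by provider name. A minimal sketch of the
same construction, assuming only that each installed provider record exposes
.type and .name attributes:

    # Sketch only: builds {provider_type: {provider_name: provider}}.
    # setdefault() is equivalent to the explicit if/else in the route code.
    installed_providers_by_type = {}
    for p in installed_providers:  # from get_installed_providers(tenant_id)
        installed_providers_by_type.setdefault(p.type, {})[p.name] = p

    # installed_providers_by_type["slack"]["my-slack"] -> installed provider record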
diff --git a/keep/api/utils/pagination.py b/keep/api/utils/pagination.py
index 2bdc5207f..fb633a5eb 100644
--- a/keep/api/utils/pagination.py
+++ b/keep/api/utils/pagination.py
@@ -5,6 +5,7 @@
from keep.api.models.alert import IncidentDto, AlertDto
from keep.api.models.workflow import (
WorkflowExecutionDTO,
+    WorkflowDTO,
)
from keep.api.models.db.workflow import * # pylint: disable=unused-wildcard-import
from typing import Optional
@@ -28,5 +29,5 @@ class WorkflowExecutionsPaginatedResultsDto(PaginatedResultsDto):
items: list[WorkflowExecutionDTO]
passCount: int = 0
avgDuration: float = 0.0
- workflow: Optional[Workflow] = None
+ workflow: Optional[WorkflowDTO] = None
failCount: int = 0
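
Since the workflow field is Optional, callers that have no single workflow in
scope can still return this DTO. A small sketch, assuming the PaginatedResultsDto
base class declares the limit/offset/count/items fields used in this hunk:

    # Sketch under the assumption above; an empty page needs no workflow.
    empty_page = WorkflowExecutionsPaginatedResultsDto(
        limit=25, offset=0, count=0, items=[]
    )
    assert empty_page.workflow is None  # defaults apply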
diff --git a/keep/workflowmanager/workflowstore.py b/keep/workflowmanager/workflowstore.py
index 7eda1943c..cfeb7021d 100644
--- a/keep/workflowmanager/workflowstore.py
+++ b/keep/workflowmanager/workflowstore.py
@@ -22,6 +22,10 @@
from keep.api.models.db.workflow import Workflow as WorkflowModel
from keep.parser.parser import Parser
from keep.workflowmanager.workflow import Workflow
+from keep.providers.providers_factory import ProvidersFactory
+from keep.api.models.workflow import (
+ ProviderDTO,
+)
class WorkflowStore:
@@ -331,3 +335,63 @@ def group_last_workflow_executions(self, workflows: list[dict]) -> list[dict]:
]
return results
+
+    def get_workflow_meta_data(
+        self, tenant_id: str, workflow: WorkflowModel, installed_providers_by_type: dict
+    ):
+ providers_dto = []
+ triggers = []
+
+ # Early return if workflow is None
+ if workflow is None:
+ return providers_dto, triggers
+
+ # Step 1: Load workflow YAML and handle potential parsing errors more thoroughly
+ try:
+ workflow_raw_data = workflow.workflow_raw
+ if not isinstance(workflow_raw_data, str):
+ self.logger.error(f"workflow_raw is not a string workflow: {workflow}")
+ return providers_dto, triggers
+
+ # Parse the workflow YAML safely
+ workflow_yaml = yaml.safe_load(workflow_raw_data)
+ if not workflow_yaml:
+ self.logger.error(f"Parsed workflow_yaml is empty or invalid: {workflow_raw_data}")
+ return providers_dto, triggers
+
+ providers = self.parser.get_providers_from_workflow(workflow_yaml)
+ except Exception as e:
+ # Improved logging to capture more details about the error
+ self.logger.error(f"Failed to parse workflow in get_workflow_meta_data: {e}, workflow: {workflow}")
+ return providers_dto, triggers # Return empty providers and triggers in case of error
+
+ # Step 2: Process providers and add them to DTO
+ for provider in providers:
+ try:
+ provider_data = installed_providers_by_type[provider.get("type")][provider.get("name")]
+ provider_dto = ProviderDTO(
+ name=provider_data.name,
+ type=provider_data.type,
+ id=provider_data.id,
+ installed=True,
+ )
+ providers_dto.append(provider_dto)
+ except KeyError:
+ # Handle case where the provider is not installed
+ try:
+ conf = ProvidersFactory.get_provider_required_config(provider.get("type"))
+ except ModuleNotFoundError:
+ self.logger.warning(f"Non-existing provider in workflow: {provider.get('type')}")
+ conf = None
+
+ # Handle providers based on whether they require config
+ provider_dto = ProviderDTO(
+ name=provider.get("name"),
+ type=provider.get("type"),
+ id=None,
+                installed=not conf,  # consider it installed if no config is required
+ )
+ providers_dto.append(provider_dto)
+
+ # Step 3: Extract triggers from workflow
+ triggers = self.parser.get_triggers_from_workflow(workflow_yaml)
+
+ return providers_dto, triggers
\ No newline at end of file
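
A quick behavioral sketch of the new helper: on unparsable workflow_raw it logs
and degrades to empty results instead of raising. FakeWorkflow below is a
hypothetical stand-in for the DB workflow model:

    # Hypothetical check of the error path in get_workflow_meta_data.
    class FakeWorkflow:
        workflow_raw = "{{ this is not valid yaml"

    providers_dto, triggers = WorkflowStore().get_workflow_meta_data(
        tenant_id="acme",
        workflow=FakeWorkflow(),
        installed_providers_by_type={},
    )
    assert (providers_dto, triggers) == ([], [])  # falls back to empty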
From 9d09a76e5db2f0e38a7c01ebf44dc1729bc04039 Mon Sep 17 00:00:00 2001
From: Tal
Date: Wed, 11 Sep 2024 18:06:03 +0300
Subject: [PATCH 07/10] fix: workflow interval can be None (#1904)
---
keep/api/models/workflow.py | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/keep/api/models/workflow.py b/keep/api/models/workflow.py
index 26d28f2e1..6beb16c80 100644
--- a/keep/api/models/workflow.py
+++ b/keep/api/models/workflow.py
@@ -28,7 +28,7 @@ class WorkflowDTO(BaseModel):
created_by: str
creation_time: datetime
triggers: List[dict] = None
- interval: int
+ interval: int | None = None
disabled: bool = False
last_execution_time: datetime = None
last_execution_status: str = None
@@ -70,7 +70,14 @@ def manipulate_raw(cls, raw, values):
ordered_raw["disabled"] = d.get("disabled")
ordered_raw["triggers"] = d.get("triggers")
for key, val in d.items():
- if key not in ["id", "description", "disabled", "triggers", "steps", "actions"]:
+ if key not in [
+ "id",
+ "description",
+ "disabled",
+ "triggers",
+ "steps",
+ "actions",
+ ]:
ordered_raw[key] = val
        # then steps and actions
ordered_raw["steps"] = d.get("steps")
From b51a1edd29dd22f2d54e1d2952cc6cd47e2b3c6c Mon Sep 17 00:00:00 2001
From: Shahar Glazner
Date: Thu, 12 Sep 2024 10:47:54 +0300
Subject: [PATCH 08/10] chore(release): 0.24.2 (#1909)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 5d20a639c..da847d461 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "keep"
-version = "0.24.1"
+version = "0.24.2"
description = "Alerting. for developers, by developers."
authors = ["Keep Alerting LTD"]
readme = "README.md"
From 62d5ed66da9f6d3db8039236033249a756341063 Mon Sep 17 00:00:00 2001
From: Shahar Glazner
Date: Thu, 12 Sep 2024 11:10:44 +0300
Subject: [PATCH 09/10] feat: inject env vars into provider provision json
(#1910)
---
keep/parser/parser.py | 46 +++++++++++++++++++++++++++++++++++++++----
pyproject.toml | 2 +-
2 files changed, 43 insertions(+), 5 deletions(-)
diff --git a/keep/parser/parser.py b/keep/parser/parser.py
index 33ae67c7e..b274a36b2 100644
--- a/keep/parser/parser.py
+++ b/keep/parser/parser.py
@@ -256,7 +256,9 @@ def _parse_providers_from_env(self, context_manager: ContextManager):
self.logger.debug(
"Parsing providers from KEEP_PROVIDERS environment variable"
)
- context_manager.providers_context.update(json.loads(providers_json))
+ providers_dict = json.loads(providers_json)
+ self._inject_env_variables(providers_dict)
+ context_manager.providers_context.update(providers_dict)
self.logger.debug(
"Providers parsed successfully from KEEP_PROVIDERS environment variable"
)
@@ -275,6 +277,7 @@ def _parse_providers_from_env(self, context_manager: ContextManager):
self.logger.debug(f"Parsing provider {provider_name} from {env}")
# {'authentication': {'webhook_url': 'https://hooks.slack.com/services/...'}}
provider_config = json.loads(os.environ.get(env))
+ self._inject_env_variables(provider_config)
context_manager.providers_context[provider_name] = provider_config
self.logger.debug(
f"Provider {provider_name} parsed successfully from {env}"
@@ -284,6 +287,28 @@ def _parse_providers_from_env(self, context_manager: ContextManager):
f"Error parsing provider config from environment variable {env}"
)
+ def _inject_env_variables(self, config):
+ """
+ Recursively inject environment variables into the config.
+ """
+ if isinstance(config, dict):
+ for key, value in config.items():
+ config[key] = self._inject_env_variables(value)
+ elif isinstance(config, list):
+ return [self._inject_env_variables(item) for item in config]
+ elif (
+ isinstance(config, str) and config.startswith("$(") and config.endswith(")")
+ ):
+ env_var = config[2:-1]
+ env_var_val = os.environ.get(env_var)
+ if not env_var_val:
+ self.logger.warning(
+ f"Environment variable {env_var} not found while injecting into config"
+ )
+ return config
+ return env_var_val
+ return config
+
def _parse_providers_from_workflow(
self, context_manager: ContextManager, workflow: dict
) -> typing.List[BaseProvider]:
@@ -328,7 +353,14 @@ def parse_interval(self, workflow) -> int:
@staticmethod
def parse_disabled(workflow_dict: dict) -> bool:
workflow_is_disabled_in_yml = workflow_dict.get("disabled")
- return True if (workflow_is_disabled_in_yml == "true" or workflow_is_disabled_in_yml is True) else False
+ return (
+ True
+ if (
+ workflow_is_disabled_in_yml == "true"
+ or workflow_is_disabled_in_yml is True
+ )
+ else False
+ )
@staticmethod
def parse_provider_parameters(provider_parameters: dict) -> dict:
@@ -538,7 +570,9 @@ def _merge_action_by_use(
extended_action = actions_context.get(action.get("use"), {})
yield ParserUtils.deep_merge(action, extended_action)
- def _get_on_failure_action(self, context_manager: ContextManager, workflow: dict) -> Step | None:
+ def _get_on_failure_action(
+ self, context_manager: ContextManager, workflow: dict
+ ) -> Step | None:
"""
Parse the on-failure action
@@ -552,7 +586,11 @@ def _get_on_failure_action(self, context_manager: ContextManager, workflow: dict
self.logger.debug("Parsing on-failure")
workflow_on_failure = workflow.get("on-failure", {})
if workflow_on_failure:
- parsed_action = self._get_action(context_manager=context_manager, action=workflow_on_failure, action_name="on-failure")
+ parsed_action = self._get_action(
+ context_manager=context_manager,
+ action=workflow_on_failure,
+ action_name="on-failure",
+ )
self.logger.debug("Parsed on-failure successfully")
return parsed_action
self.logger.debug("No on-failure action")
diff --git a/pyproject.toml b/pyproject.toml
index da847d461..8833cb00e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "keep"
-version = "0.24.2"
+version = "0.24.3"
description = "Alerting. for developers, by developers."
authors = ["Keep Alerting LTD"]
readme = "README.md"
From 48d9826c42d4f3a97ed0bcf5b671ec4f94453651 Mon Sep 17 00:00:00 2001
From: Ezhil Shanmugham
Date: Fri, 13 Sep 2024 17:24:30 +0530
Subject: [PATCH 10/10] chore: update query_victoriametrics (#1892)
Co-authored-by: Shahar Glazner
---
examples/workflows/query_victoriametrics.yml | 35 ++++++++++++++++++--
1 file changed, 33 insertions(+), 2 deletions(-)
diff --git a/examples/workflows/query_victoriametrics.yml b/examples/workflows/query_victoriametrics.yml
index e4b3ad0d0..bfa5383c4 100644
--- a/examples/workflows/query_victoriametrics.yml
+++ b/examples/workflows/query_victoriametrics.yml
@@ -10,6 +10,37 @@ workflow:
config: "{{ providers.victoriametrics }}"
type: victoriametrics
with:
- query: process_memory_limit_bytes
+ query: avg(rate(process_cpu_seconds_total))
queryType: query
- actions: []
+
+ actions:
+ - name: trigger-slack1
+ condition:
+ - name: threshold-condition
+ type: threshold
+ value: "{{ steps.victoriametrics-step.results.data.result.0.value.1 }}"
+ compare_to: 0.0050
+ alias: A
+ operator: ">"
+ provider:
+ type: slack
+ config: "{{ providers.slack }}"
+ with:
+ message: "Result: {{ steps.victoriametrics-step.results.data.result.0.value.1 }} is greater than 0.0040! 🚨"
+
+ - name: trigger-slack2
+ if: "{{ A }}"
+ provider:
+ type: slack
+ config: "{{ providers.slack }}"
+ with:
+ message: "Result: {{ steps.victoriametrics-step.results.data.result.0.value.1 }} is greater than 0.0040! 🚨"
+
+ - name: trigger-ntfy
+ if: "{{ A }}"
+ provider:
+ type: ntfy
+ config: "{{ providers.ntfy }}"
+ with:
+ message: "Result: {{ steps.victoriametrics-step.results.data.result.0.value.1 }} is greater than 0.0040! 🚨"
+ topic: ezhil