Skip to content

Commit

Permalink
Cleaning
Browse files Browse the repository at this point in the history
  • Loading branch information
Matvey-Kuk committed Nov 20, 2024
1 parent 634780d commit 84b5eef
Show file tree
Hide file tree
Showing 5 changed files with 25 additions and 29 deletions.
8 changes: 4 additions & 4 deletions keep/api/background_server_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
import logging
import requests

from keep.api.core.demo_mode import launch_demo_mode
from keep.api.core.report_uptime import launch_uptime_reporting
from keep.api.core.demo_mode import launch_demo_mode_thread
from keep.api.core.report_uptime import launch_uptime_reporting_thread

logger = logging.getLogger(__name__)

Expand All @@ -38,8 +38,8 @@ def main():
time.sleep(5)

threads = []
threads.append(launch_demo_mode(keep_api_url))
threads.append(launch_uptime_reporting())
threads.append(launch_demo_mode_thread(keep_api_url))
threads.append(launch_uptime_reporting_thread())

for thread in threads:
if thread is not None:
Expand Down
2 changes: 1 addition & 1 deletion keep/api/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import keep.api.logging
from keep.api.api import AUTH_TYPE
from keep.api.core.db_on_start import migrate_db, try_create_single_tenant
from keep.api.core.report_uptime import launch_uptime_reporting
from keep.api.core.report_uptime import launch_uptime_reporting_thread
from keep.api.core.dependencies import SINGLE_TENANT_UUID
from keep.identitymanager.identitymanagerfactory import IdentityManagerTypes

Expand Down
41 changes: 18 additions & 23 deletions keep/api/core/demo_mode.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import threading
import time
from datetime import timezone
from uuid import uuid4

import requests
from dateutil import parser
Expand Down Expand Up @@ -69,7 +70,6 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="10.0.0.1",
mac_address="",
category="Python",
manufacturer="",
dependencies={
Expand All @@ -91,7 +91,6 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="10.0.0.2",
mac_address="",
category="nextjs",
manufacturer="",
dependencies={
Expand All @@ -112,7 +111,6 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="10.0.0.3",
mac_address="",
category="postgres",
manufacturer="",
dependencies={},
Expand All @@ -131,9 +129,7 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="10.0.0.4",
mac_address="",
category="Kafka",
manufacturer="",
dependencies={
"processor": "AMQP",
},
Expand All @@ -152,9 +148,7 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="10.0.0.5",
mac_address="",
category="go",
manufacturer="",
dependencies={
"storage": "HTTP/S",
},
Expand All @@ -173,9 +167,7 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="172.1.1.0",
mac_address="",
category="nextjs",
manufacturer="",
dependencies={
"api": "HTTP/S",
},
Expand All @@ -194,9 +186,7 @@
email="[email protected]",
slack="https://slack.keephq.dev",
ip_address="10.0.0.8",
mac_address="",
category="python",
manufacturer="",
dependencies={},
application_ids=[],
updated_at="2024-11-18T10:13:56",
Expand Down Expand Up @@ -225,7 +215,6 @@ def get_or_create_topology(keep_api_key, keep_api_url):
services_existing = services_existing.json()

# Creating services

if len(services_existing) == 0:
process_topology(
SINGLE_TENANT_UUID, services_to_create, "Prod-Datadog", "datadog"
Expand Down Expand Up @@ -301,20 +290,22 @@ def remove_old_incidents(keep_api_key, keep_api_url):
response.raise_for_status()


def get_existing_installed_providers(keep_api_key, keep_api_url):
def get_installed_providers(keep_api_key, keep_api_url):
    """Return the installed providers registered in a Keep instance.

    Queries the Keep API's /providers endpoint using the given API key
    and extracts only the "installed_providers" portion of the payload.

    Args:
        keep_api_key: API key sent via the "x-api-key" header.
        keep_api_url: Base URL of the Keep API (no trailing slash).

    Returns:
        The decoded "installed_providers" list from the JSON response.

    Raises:
        requests.exceptions.HTTPError: if the API responds with an
            error status code.
    """
    providers_response = requests.get(
        f"{keep_api_url}/providers",
        headers={"x-api-key": keep_api_key},
    )
    # Fail loudly on non-2xx so callers don't parse an error body.
    providers_response.raise_for_status()
    payload = providers_response.json()
    return payload["installed_providers"]


def simulate_alerts(
keep_api_url=None,
keep_api_key=None,
sleep_interval=5,
demo_correlation_rules=False,
demo_topology=False,
clean_old_incidents=False,
):
logger.info("Simulating alerts...")

Expand All @@ -337,19 +328,20 @@ def simulate_alerts(
for provider in providers
}

existing_installed_providers = get_existing_installed_providers(keep_api_key, keep_api_url)
existing_installed_providers = get_installed_providers(keep_api_key, keep_api_url)
logger.info(f"Existing installed providers: {existing_installed_providers}")
existing_providers_to_their_ids = {}

for existing_provider in existing_installed_providers:
if existing_provider['type'] in providers:
existing_providers_to_their_ids[existing_provider['type']] = existing_provider['id']

logger.info(f"Existing installed existing_providers_to_their_ids: {existing_providers_to_their_ids}")

if demo_correlation_rules:
logger.info("Creating correlation rules...")
get_or_create_correlation_rules(keep_api_key, keep_api_url)
logger.info("Correlation rules created.")

if demo_topology:
logger.info("Creating topology...")
get_or_create_topology(keep_api_key, keep_api_url)
Expand All @@ -359,9 +351,10 @@ def simulate_alerts(
try:
logger.info("Looping to send alerts...")

logger.info("Removing old incidents...")
remove_old_incidents(keep_api_key, keep_api_url)
logger.info("Old incidents removed.")
if clean_old_incidents:
logger.info("Removing old incidents...")
remove_old_incidents(keep_api_key, keep_api_url)
logger.info("Old incidents removed.")

send_alert_url_params = {}

Expand All @@ -377,9 +370,7 @@ def simulate_alerts(
alert = provider.simulate_alert()

if provider_type in providers_to_randomize_fingerprint_for:
send_alert_url_params["fingerprint"] = "".join(
random.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=10)
)
send_alert_url_params["fingerprint"] = str(uuid4())

# Determine number of times to send the same alert
num_iterations = 1
Expand All @@ -390,17 +381,20 @@ def simulate_alerts(
logger.info("Sending alert: {}".format(alert))
try:
env = random.choice(["production", "staging", "development"])

if not "provider_id" in send_alert_url_params:
send_alert_url_params["provider_id"] = f"{provider_type}-{env}"

prepared_request = PreparedRequest()
prepared_request.prepare_url(send_alert_url, send_alert_url_params)
logger.info(f"Sending alert to {prepared_request.url} with url params {send_alert_url_params}")

response = requests.post(
prepared_request.url,
headers={"x-api-key": keep_api_key},
json=alert,
)
response.raise_for_status() # Raise an HTTPError for bad responses
response.raise_for_status()
except requests.exceptions.RequestException as e:
logger.error("Failed to send alert: {}".format(e))
time.sleep(sleep_interval)
Expand All @@ -421,7 +415,7 @@ def simulate_alerts(
time.sleep(sleep_interval)


def launch_demo_mode(keep_api_url=None) -> threading.Thread | None:
def launch_demo_mode_thread(keep_api_url=None) -> threading.Thread | None:
if not KEEP_LIVE_DEMO_MODE:
logger.info("Not launching the demo mode.")
return
Expand All @@ -447,6 +441,7 @@ def launch_demo_mode(keep_api_url=None) -> threading.Thread | None:
"sleep_interval": sleep_interval,
"demo_correlation_rules": True,
"demo_topology": True,
"clean_old_incidents": True,
},
)
thread.daemon = True
Expand Down
2 changes: 1 addition & 1 deletion keep/api/core/report_uptime.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ async def report_uptime_to_posthog():
# Important to keep it async, otherwise will clog main gunicorn thread and cause timeouts.
await asyncio.sleep(UPTIME_REPORTING_CADENCE)

def launch_uptime_reporting() -> threading.Thread | None:
def launch_uptime_reporting_thread() -> threading.Thread | None:
"""
Running async uptime reporting as a sub-thread.
"""
Expand Down
1 change: 1 addition & 0 deletions scripts/simulate_alerts.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ def main():
sleep_interval=SLEEP_INTERVAL,
demo_correlation_rules=args.full_demo,
demo_topology=args.full_demo,
clean_old_incidents=args.full_demo,
)

if __name__ == "__main__":
Expand Down

0 comments on commit 84b5eef

Please sign in to comment.