From 0740b73032ea11133263d9531031e46964330e33 Mon Sep 17 00:00:00 2001
From: Guillermo Mejias
Date: Thu, 28 Nov 2024 12:53:10 +0100
Subject: [PATCH] Adapt script to publish metrics for Prometheus

---
 .gitignore       |   4 +-
 Dockerfile       |   8 +-
 README.md        |  65 ++++++
 main.py          | 434 +++++++++++++++++++++++++++++++-----
 mainv2.py        | 569 -----------------------------------------
 requirements.txt |   3 +
 6 files changed, 455 insertions(+), 628 deletions(-)
 create mode 100644 README.md
 delete mode 100644 mainv2.py
 create mode 100644 requirements.txt

diff --git a/.gitignore b/.gitignore
index dc95cc6..ae79845 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,6 @@
 focusing_metrics.log
 logs/
-data.json
\ No newline at end of file
+data.json
+
+venv/
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 2583fdb..d7d6557 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,10 +5,8 @@ RUN mkdir /app
 WORKDIR /app
 
 RUN pip install --upgrade pip
-RUN pip install requests
 
-COPY mainv2.py .
+COPY . .
+RUN pip install -r requirements.txt
 
-
-
-ENTRYPOINT ["python", "mainv2.py"]
+ENTRYPOINT ["python", "main.py"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..92d352c
--- /dev/null
+++ b/README.md
@@ -0,0 +1,65 @@
+
+# GH-MONIT
+
+![Latest release](https://img.shields.io/github/v/release/Gravitate-Health/gh-monit)
+![Actions workflow](https://github.com/Gravitate-Health/gh-monit/actions/workflows/cicd.yml/badge.svg)
+![Python](https://img.shields.io/badge/python-v3.8+-blue.svg)
+[![License](https://img.shields.io/badge/license-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache)
+
+
+## Table of contents
+
+- [GH-MONIT](#gh-monit)
+  - [Table of contents](#table-of-contents)
+  - [Introduction](#introduction)
+  - [Deployment](#deployment)
+    - [Kubernetes (Kustomize)](#kubernetes-kustomize)
+  - [Usage](#usage)
+  - [Known issues and limitations](#known-issues-and-limitations)
+  - [Getting help](#getting-help)
+  - [Contributing](#contributing)
+  - [Authors and history](#authors-and-history)
+  - [Acknowledgments](#acknowledgments)
+
+
+## Introduction
+
+This repository contains a monitor for Gravitate Health resources. It publishes metrics on the status of ePIs, preprocessors, and the focusing process.
+
+## Deployment
+
+### Kubernetes (Kustomize)
+
+Production:
+```bash
+kubectl apply -k kubernetes/base
+```
+
+Development:
+```bash
+kubectl apply -k kubernetes/dev
+```
+
+
+Usage
+-----
+The monitor runs continuously and exposes the collected metrics in a Prometheus-compatible text format on port 5000 at the `/metrics` endpoint (see the example request after the Contributing section).
+
+Known issues and limitations
+----------------------------
+
+Getting help
+------------
+If you find a problem or need extra help, please use the issues tab to report it.
+
+Contributing
+------------
+To contribute, fork this repository and send a pull request with your changes squashed.
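+
+For example, once the monitor is running you can verify the exposed metrics with a plain HTTP request. The host below is a placeholder for wherever the container is reachable; the port and path come from the Flask app in `main.py`:
+
+```bash
+curl http://localhost:5000/metrics
+```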
+ +Authors and history +--------------------------- +- João Almeida ([@joofio](https://github.com/joofio)) +- Guillermo Mejías ([@gmej](https://github.com/gmej)) + +Acknowledgments +--------------------------- +- [ORIGINAL DEVELOPMENT by @joofio](https://github.com/joofio/gh-monit) \ No newline at end of file diff --git a/main.py b/main.py index 0094622..920b383 100644 --- a/main.py +++ b/main.py @@ -1,9 +1,38 @@ -import socket +import logging +import os import time +from logging.handlers import RotatingFileHandler +from flask import Flask, Response +import threading +from requests_futures.sessions import FuturesSession + +app = Flask(__name__) +metrics = {} + import requests -import os -# Configuration +# Define the directory and ensure it exists +log_directory = "logs" +if not os.path.exists(log_directory): + os.makedirs(log_directory) +# Set up the logger +logger = logging.getLogger("my_logger") +logger.setLevel(logging.DEBUG) # Configuration + + +# Configure the rotating file handler +log_file = os.path.join(log_directory, "gh-monit.log") + +handler = RotatingFileHandler(log_file, maxBytes=10 * 1024 * 1024, backupCount=10) +handler.setLevel(logging.DEBUG) +# Create a formatter and set it for the handler +formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") +handler.setFormatter(formatter) + +# Add the handler to the logger +logger.addHandler(handler) + + GRAPHITE_HOST = os.getenv( "GRAPHITE_HOST", "127.0.0.1" ) # "host.docker.internal" # Replace with your Graphite host @@ -11,7 +40,11 @@ "GRAPHITE_PORT", 2003 ) # Default port for Carbon plaintext protocol -print(GRAPHITE_HOST, GRAPHITE_PORT) +BASE_URL = os.getenv("BASE_URL", "https://gravitate-health.lst.tfo.upm.es/") +# print(GRAPHITE_HOST, GRAPHITE_PORT) +logger.debug( + f"BASEURL is {BASE_URL} and HOST IS {GRAPHITE_HOST} and port is {GRAPHITE_PORT}" +) LENSES = [ "lens-selector-mvp2_HIV", @@ -22,99 +55,394 @@ "lens-selector-mvp2_pregnancy", ] -PATIENT_IDS = ["alicia-1", "Cecilia-1", "Pedro-1"] +# LENSES = requests.post(BASE_URL + "/focusing/lenses") + + +def log_result( + status_code, + warnings, + method, + logger_method=["prometheus"], + timestamp=None, + bundleid=None, + lens=None, + pid=None, +): + """ + Logs a metric to be exposed for Prometheus. 
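+
+    The metric value encodes the outcome of the focusing call:
+      0 - HTTP 200 response with no warnings
+      1 - non-200 response
+      2 - HTTP 200 response with preprocessing warnings
+      3 - HTTP 200 response with lens warnings
+      4 - any other combination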
+ """ + metric_path = f"""gh_focusing_{method}_{bundleid["name"]}_{pid}_{lens}""" + timestamp = timestamp or int(time.time()) + if status_code == 200 and not warnings: + value = 0 + elif status_code != 200: + value = 1 + logger.debug( + f"Value 1 for {status_code} and method {method} and bundle {bundleid} and pid {pid}" + ) + + elif status_code == 200 and warnings["preprocessingWarnings"]: + # print(warnings) + # print(warnings["preprocessingWarnings"]) + value = 2 + logger.debug( + f"Value 2 for {status_code} and {warnings} and method {method} and bundle {bundleid} and pid {pid}" + ) + + elif status_code == 200 and len(warnings["lensesWarnings"]) > 0: + value = 3 + logger.debug( + f"Value 3 for {status_code} and {warnings} and method {method} and bundle {bundleid} and pid {pid}" + ) + + else: + value = 4 + logger.debug( + f"Value 4 for {status_code} and {warnings} and method {method} and bundle {bundleid} and pid {pid}" + ) + + metrics[metric_path] = value + +@app.route('/metrics') +def metrics_endpoint(): + metrics_data = "\n".join([f"{key} {value}" for key, value in metrics.items()]) + print(metrics_data) + return Response(metrics_data, mimetype='text/plain') + +# print(LENSES) +PATIENT_IDS = [ + "alicia-1", + "Cecilia-1", + "Pedro-1", + "helen-1", + "maria-1", + "0101010101", + "ips-1", + "ips-2", + "ips-3", + "ips-4", +] BUNDLES = [ { "id": "bundlepackageleaflet-es-94a96e39cfdcd8b378d12dd4063065f9", - "name": "biktarvy", + "name": "biktarvy-es", + }, + { + "id": "bundlepackageleaflet-en-94a96e39cfdcd8b378d12dd4063065f9", + "name": "biktarvy-en", }, { "id": "bundlepackageleaflet-es-925dad38f0afbba36223a82b3a766438", - "name": "calcio", + "name": "calcio-es", }, { "id": "bundlepackageleaflet-es-2f37d696067eeb6daf1111cfc3272672", - "name": "tegretol", + "name": "tegretol-es", + }, + { + "id": "bundlepackageleaflet-en-2f37d696067eeb6daf1111cfc3272672", + "name": "tegretol-en", }, { "id": "bundlepackageleaflet-es-4fab126d28f65a1084e7b50a23200363", - "name": "xenical", + "name": "xenical-es", + }, + { + "id": "bundlepackageleaflet-en-4fab126d28f65a1084e7b50a23200363", + "name": "xenical-en", }, { "id": "bundlepackageleaflet-es-29436a85dac3ea374adb3fa64cfd2578", - "name": "hypericum", + "name": "hypericum-es", }, { "id": "bundlepackageleaflet-es-04c9bd6fb89d38b2d83eced2460c4dc1", - "name": "flucelvax", + "name": "flucelvax-es", + }, + { + "id": "bundlepackageleaflet-en-04c9bd6fb89d38b2d83eced2460c4dc1", + "name": "flucelvax-en", }, { "id": "bundlepackageleaflet-es-49178f16170ee8a6bc2a4361c1748d5f", - "name": "dovato", + "name": "dovato-es", + }, + { + "id": "bundlepackageleaflet-en-49178f16170ee8a6bc2a4361c1748d5f", + "name": "dovato-en", }, { "id": "bundlepackageleaflet-es-e762a2f54b0b24fca4744b1bb7524a5b", - "name": "mirtazapine", + "name": "mirtazapine-es", }, { "id": "bundlepackageleaflet-es-da0fc2395ce219262dfd4f0c9a9f72e1", - "name": "blaston", + "name": "blaston-es", + }, +] + +PREPROCBUNDLES = [ + { + "id": "processedbundlekarveabik", + "name": "biktarvy-en", }, { - "id": "bundlepackageleaflet-es-da0fc2395ce219262dfd4f0c9a9f72e1", - "name": "blaston", + "id": "bundleprocessed-es-b44cce291e466626afa836fffe72c350", + "name": "biktarvy-es", + }, + { + "id": "bundleprocessed-pt-b44cce291e466626afa836fffe72c350", + "name": "biktarvy-pt", + }, + { + "id": "processedbundlekarveacalcium", + "name": "calcium_pt", + }, + { + "id": "processedbundledovato-en", + "name": "dovato-en", + }, + { + "id": "processedbundledovato-es", + "name": "dovato-es", + }, + { + "id": "processedbundleflucelvax", + 
"name": "flucelvax-en", + }, + { + "id": "processedbundleflucelvaxES", + "name": "flucelvax-es", + }, + { + "id": "processedbundlehypericum", + "name": "hypericum-es", + }, + { + "id": "bundle-ibu-proc", + "name": "ibuprofen-en", + }, + { + "id": "Processedbundlekarvea", + "name": "karvea-en", + }, + { + "id": "bundle-processed-pt-2d49ae46735143c1323423b7aea24165", + "name": "karvea-pt", + }, + { + "id": "bundle-met-proc", + "name": "metformin-en", + }, + { + "id": "bundle-novo-proc", + "name": "novorapid-en", + }, + { + "id": "bundlepackageleaflet-es-proc-2f37d696067eeb6daf1111cfc3272672", + "name": "tegretrol-es", + }, + { + "id": "bundlepackageleaflet-es-proc-4fab126d28f65a1084e7b50a23200363", + "name": "xenical-es", }, ] -def send_to_graphite(metric_path, value, timestamp=None): - """ - Sends a metric to Graphite. - """ - timestamp = timestamp or int(time.time()) - message = f"{metric_path} {value} {timestamp}\n" - print(f"Sending to Graphite: {message}", end="") +def prepare_requests(BUNDLES, LENSES, PATIENT_IDS, BASE_URL, method): + session = FuturesSession() + requests_list = [] - # Open a socket to Graphite and send the data - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - sock.connect((GRAPHITE_HOST, int(GRAPHITE_PORT))) - sock.sendall(message.encode("utf-8")) + for bundleid in BUNDLES: + for lens in LENSES: + for pid in PATIENT_IDS: + WEBSITE_URL = ( + BASE_URL + + "focusing/focus/" + + bundleid["id"] + + "?preprocessors=preprocessing-service-manual&patientIdentifier=" + + pid + + "&lenses=" + + lens + ) + requests_list.append((session.post(WEBSITE_URL), bundleid, lens, pid, method)) + + return requests_list +def process_responses(requests_list): + for future, bundleid, lens, pid, method in requests_list: + response = future.result() + status_code, warnings = check_website_status(response) + log_result( + status_code=status_code, + warnings=warnings, + method=method, + bundleid=bundleid, + lens=lens, + pid=pid, + ) -def check_website_status(url): +def check_website_status(response): """ Checks the status code of a website. 
""" - try: - response = requests.post(url) - return response.status_code - except requests.RequestException as e: - print(f"Error checking website status: {e}") - return None + focusing_warnings = response.headers.get("gh-focusing-warnings") + + if response.status_code == 400: + logger.debug(f"Warning: {response.status_code} and {response.text}") + return response.status_code, {} + + elif focusing_warnings: + return response.status_code, eval(focusing_warnings) + else: + return response.status_code, {} + +def check_bundles_in_list(BASE_URL): + ENCHACED_WHITE_LIST = [ + "enhanced-bundlebik-alicia", + "enhanced-bundlekarveacalcium-alicia", + "enchanced-bundledovato-es", + "enchanced-bundledovato-en", + "enhanced-bundleflucelvax-alicia", + "enhanced-bundlehypericum-alicia", + "enhancedbundlekarvea-alicia", + "enhancedddbundlekarvea", + "enhanced-bundlebik-pedro", + "enhanced-bundlekarveacalcium-pedro", + "enchanced-bundledovato-pedro-en", + "enchanced-bundledovato-pedro-es", + "enhanced-bundleflucelvax-pedro", + "enhanced-bundlehypericum-pedro", + "enhancedbundlekarveaP", + ] + WEBSITE_URL = BASE_URL + "epi/api/fhir/Bundle" + print(WEBSITE_URL) + + next_url = WEBSITE_URL # Start with the initial URL + + while next_url: # Loop through bundles while 'next' link exists + response = requests.get(next_url) + + if response.status_code != 200: + print(f"Failed to fetch data: {response.status_code}") + break + + # Parse the JSON response + data = response.json() + + # Process entries in the current bundle + if "entry" in data: + for entry in data["entry"]: + bid = entry["resource"]["id"] + if bid in ENCHACED_WHITE_LIST: + continue + print(f"Processing bundle ID: {bid}") + + # Check the 'List' resource for the current bundle ID + nresponse = requests.get(f"{BASE_URL}epi/api/fhir/List?item={bid}") + + if nresponse.status_code != 200: + print(f"Failed to check List resource: {nresponse.status_code}") + continue + + if nresponse.json().get("total", 0) > 0: + value = 0 # If the resource is found + else: + value = 1 # If the resource is not found + + metric_path = f"gh.listmember.{bid}" + timestamp = int(time.time()) + + message = f"{metric_path} {value} {timestamp}\n" + # print(f"Sending to Graphite: {message}", end="") + print(message) + + # Open a socket to Graphite and send the data + # Uncomment and modify if sending to Graphite + # with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + # sock.connect((GRAPHITE_HOST, int(GRAPHITE_PORT))) + # sock.sendall(message.encode("utf-8")) + + # Check for the next link to paginate + next_url = None + for link in data.get("link", []): + if link["relation"] == "next": + next_url = link["url"] + print(f"Fetching next page: {next_url}") + break # We only need the first 'next' link + print("end") + return 1 + + +def chek_all_prpcessor_with_post_data(BUNDLES, PATIENT_IDS, BASE_URL): + for bundleid in BUNDLES: + bundleresp = requests.get(BASE_URL + "/Bundle/" + bundleid["id"]) + bundle = bundleresp.json() + for pid in PATIENT_IDS: + patresp_body = { + "resourceType": "Parameters", + "id": "example", + "parameter": [ + {"name": "identifier", "valueIdentifier": {"value": pid}} + ], + } + patresp = requests.post(BASE_URL + "/Patient/$summary", body=patresp_body) + ips = patresp.json() + # print(ips) + body = {"epi": bundle, "ips": ips} + WEBSITE_URL = ( + BASE_URL + + "focusing/focus?preprocessors=preprocessing-service-mvp2&preprocessors=preprocessing-service-manual" + ) + print(WEBSITE_URL) + + # WEBSITE_URL = WEBSITE_DATA["url"] + # WEBSITE_DESC = 
WEBSITE_DATA["desc"] + status_code, warnings = check_website_status(WEBSITE_URL, body) + log_result( + status_code=status_code, + warnings=warnings, + method="allpreprocesspost", + bundleid=bundleid, + lens="all", + pid=pid, + ) + return 1 def main(): + # Run the Flask app in a separate thread + threading.Thread(target=lambda: app.run(host='0.0.0.0', port=5000)).start() + while True: - for bundleid in BUNDLES: - for lens in LENSES: - for pid in PATIENT_IDS: - WEBSITE_URL = ( - "https://fosps.gravitatehealth.eu/focusing/focus/" - + bundleid["id"] - + "?preprocessors=preprocessing-service-manual&patientIdentifier=" - + pid - + "&lenses=" - + lens - ) - # WEBSITE_URL = WEBSITE_DATA["url"] - # WEBSITE_DESC = WEBSITE_DATA["desc"] - status_code = check_website_status(WEBSITE_URL) - if status_code is not None: - metric_path = f"""gh.focusing.{bundleid["name"]}.{pid}.{lens}""" - send_to_graphite(metric_path, status_code) - # time.sleep(3600) - time.sleep(600) + try: + requests_list = prepare_requests(BUNDLES, LENSES, PATIENT_IDS, BASE_URL, "preprocessperlens") + process_responses(requests_list) + except Exception as err: + logger.debug(f"Error on function chek_preprocessor_data -> {err}") + + try: + requests_list = prepare_requests(BUNDLES, ["all"], PATIENT_IDS, BASE_URL, "alllenses") + process_responses(requests_list) + except Exception as err: + logger.debug(f"Error on function chek_all_lenses_data -> {err}") + + try: + requests_list = prepare_requests(BUNDLES, ["all"], PATIENT_IDS, BASE_URL, "allpreprocess") + process_responses(requests_list) + except Exception as err: + logger.debug(f"Error on function chek_all_preprocess_data -> {err}") + + try: + requests_list = prepare_requests(PREPROCBUNDLES, LENSES, PATIENT_IDS, BASE_URL, "send-preprocess") + process_responses(requests_list) + except Exception as err: + logger.debug(f"Error on function chek_lenses_foralreadypreprocess_data -> {err}") + try: + check_bundles_in_list(BASE_URL) + except Exception as err: + logger.debug(f"Error on function check_bundles_in_list -> {err}") if __name__ == "__main__": main() diff --git a/mainv2.py b/mainv2.py deleted file mode 100644 index d5a13d2..0000000 --- a/mainv2.py +++ /dev/null @@ -1,569 +0,0 @@ -import json -import logging -import os -import socket -import time -from logging.handlers import RotatingFileHandler - -import requests - -# Define the directory and ensure it exists -log_directory = "logs" -if not os.path.exists(log_directory): - os.makedirs(log_directory) -# Set up the logger -logger = logging.getLogger("my_logger") -logger.setLevel(logging.DEBUG) # Configuration - - -# Configure the rotating file handler -log_file = os.path.join(log_directory, "gh-monit.log") - -handler = RotatingFileHandler(log_file, maxBytes=10 * 1024 * 1024, backupCount=10) -handler.setLevel(logging.DEBUG) -# Create a formatter and set it for the handler -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -handler.setFormatter(formatter) - -# Add the handler to the logger -logger.addHandler(handler) - - -GRAPHITE_HOST = os.getenv( - "GRAPHITE_HOST", "127.0.0.1" -) # "host.docker.internal" # Replace with your Graphite host -GRAPHITE_PORT = os.getenv( - "GRAPHITE_PORT", 2003 -) # Default port for Carbon plaintext protocol - -BASE_URL = os.getenv("BASE_URL", "https://gravitate-health.lst.tfo.upm.es/") -# print(GRAPHITE_HOST, GRAPHITE_PORT) -logger.debug( - f"BASEURL is {BASE_URL} and HOST IS {GRAPHITE_HOST} and port is {GRAPHITE_PORT}" -) - -LENSES = [ - "lens-selector-mvp2_HIV", - 
"lens-selector-mvp2_allergy", - "lens-selector-mvp2_diabetes", - "lens-selector-mvp2_interaction", - "lens-selector-mvp2_intolerance", - "lens-selector-mvp2_pregnancy", -] - -# LENSES = requests.post(BASE_URL + "/focusing/lenses") - - -def log_result( - status_code, - warnings, - method, - logger_method=["graphite"], - timestamp=None, - bundleid=None, - lens=None, - pid=None, -): - """ - Sends a metric to Graphite. - """ - metric_path = f"""gh.focusing.{method}.{bundleid["name"]}.{pid}.{lens}""" - timestamp = timestamp or int(time.time()) - if status_code == 200 and not warnings: - value = 0 - elif status_code != 200: - value = 1 - logger.debug( - f"Value 1 for {status_code} and method {method} and bundle {bundleid} and pid {pid}" - ) - - elif status_code == 200 and warnings["preprocessingWarnings"]: - # print(warnings) - # print(warnings["preprocessingWarnings"]) - value = 2 - logger.debug( - f"Value 2 for {status_code} and {warnings} and method {method} and bundle {bundleid} and pid {pid}" - ) - - elif status_code == 200 and len(warnings["lensesWarnings"]) > 0: - value = 3 - logger.debug( - f"Value 3 for {status_code} and {warnings} and method {method} and bundle {bundleid} and pid {pid}" - ) - - else: - value = 4 - logger.debug( - f"Value 4 for {status_code} and {warnings} and method {method} and bundle {bundleid} and pid {pid}" - ) - - message = f"{metric_path} {value} {timestamp}\n" - # print(f"Sending to Graphite: {message}", end="") - - if "graphite" in logger_method: - # Open a socket to Graphite and send the data - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - sock.connect((GRAPHITE_HOST, int(GRAPHITE_PORT))) - sock.sendall(message.encode("utf-8")) - - -# print(LENSES) -PATIENT_IDS = [ - "alicia-1", - "Cecilia-1", - "Pedro-1", - "helen-1", - "maria-1", - "0101010101", - "ips-1", - "ips-2", - "ips-3", - "ips-4", -] -BUNDLES = [ - { - "id": "bundlepackageleaflet-es-94a96e39cfdcd8b378d12dd4063065f9", - "name": "biktarvy-es", - }, - { - "id": "bundlepackageleaflet-en-94a96e39cfdcd8b378d12dd4063065f9", - "name": "biktarvy-en", - }, - { - "id": "bundlepackageleaflet-es-925dad38f0afbba36223a82b3a766438", - "name": "calcio-es", - }, - { - "id": "bundlepackageleaflet-es-2f37d696067eeb6daf1111cfc3272672", - "name": "tegretol-es", - }, - { - "id": "bundlepackageleaflet-en-2f37d696067eeb6daf1111cfc3272672", - "name": "tegretol-en", - }, - { - "id": "bundlepackageleaflet-es-4fab126d28f65a1084e7b50a23200363", - "name": "xenical-es", - }, - { - "id": "bundlepackageleaflet-en-4fab126d28f65a1084e7b50a23200363", - "name": "xenical-en", - }, - { - "id": "bundlepackageleaflet-es-29436a85dac3ea374adb3fa64cfd2578", - "name": "hypericum-es", - }, - { - "id": "bundlepackageleaflet-es-04c9bd6fb89d38b2d83eced2460c4dc1", - "name": "flucelvax-es", - }, - { - "id": "bundlepackageleaflet-en-04c9bd6fb89d38b2d83eced2460c4dc1", - "name": "flucelvax-en", - }, - { - "id": "bundlepackageleaflet-es-49178f16170ee8a6bc2a4361c1748d5f", - "name": "dovato-es", - }, - { - "id": "bundlepackageleaflet-en-49178f16170ee8a6bc2a4361c1748d5f", - "name": "dovato-en", - }, - { - "id": "bundlepackageleaflet-es-e762a2f54b0b24fca4744b1bb7524a5b", - "name": "mirtazapine-es", - }, - { - "id": "bundlepackageleaflet-es-da0fc2395ce219262dfd4f0c9a9f72e1", - "name": "blaston-es", - }, -] - -PREPROCBUNDLES = [ - { - "id": "processedbundlekarveabik", - "name": "biktarvy-en", - }, - { - "id": "bundleprocessed-es-b44cce291e466626afa836fffe72c350", - "name": "biktarvy-es", - }, - { - "id": 
"bundleprocessed-pt-b44cce291e466626afa836fffe72c350", - "name": "biktarvy-pt", - }, - { - "id": "processedbundlekarveacalcium", - "name": "calcium_pt", - }, - { - "id": "processedbundledovato-en", - "name": "dovato-en", - }, - { - "id": "processedbundledovato-es", - "name": "dovato-es", - }, - { - "id": "processedbundleflucelvax", - "name": "flucelvax-en", - }, - { - "id": "processedbundleflucelvaxES", - "name": "flucelvax-es", - }, - { - "id": "processedbundlehypericum", - "name": "hypericum-es", - }, - { - "id": "bundle-ibu-proc", - "name": "ibuprofen-en", - }, - { - "id": "Processedbundlekarvea", - "name": "karvea-en", - }, - { - "id": "bundle-processed-pt-2d49ae46735143c1323423b7aea24165", - "name": "karvea-pt", - }, - { - "id": "bundle-met-proc", - "name": "metformin-en", - }, - { - "id": "bundle-novo-proc", - "name": "novorapid-en", - }, - { - "id": "bundlepackageleaflet-es-proc-2f37d696067eeb6daf1111cfc3272672", - "name": "tegretrol-es", - }, - { - "id": "bundlepackageleaflet-es-proc-4fab126d28f65a1084e7b50a23200363", - "name": "xenical-es", - }, -] - - -def chek_preprocessor_data(BUNDLES, LENSES, PATIENT_IDS, BASE_URL): - for bundleid in BUNDLES: - for lens in LENSES: - for pid in PATIENT_IDS: - WEBSITE_URL = ( - BASE_URL - + "focusing/focus/" - + bundleid["id"] - + "?preprocessors=preprocessing-service-manual&patientIdentifier=" - + pid - + "&lenses=" - + lens - ) - print(WEBSITE_URL) - # WEBSITE_URL = WEBSITE_DATA["url"] - # WEBSITE_DESC = WEBSITE_DATA["desc"] - status_code, warnings = check_website_status(WEBSITE_URL) - log_result( - status_code=status_code, - warnings=warnings, - method="preprocessperlens", - bundleid=bundleid, - lens=lens, - pid=pid, - ) - time.sleep(1) - return 1 - - -def chek_lenses_foralreadypreprocess_data(BUNDLES, LENSES, PATIENT_IDS, BASE_URL): - for bundleid in PREPROCBUNDLES: - for lens in LENSES: - for pid in PATIENT_IDS: - WEBSITE_URL = ( - BASE_URL - + "focusing/focus/" - + bundleid["id"] - + "?preprocessors=preprocessing-service-manual&patientIdentifier=" - + pid - + "&lenses=" - + lens - ) - print(WEBSITE_URL) - # WEBSITE_URL = WEBSITE_DATA["url"] - # WEBSITE_DESC = WEBSITE_DATA["desc"] - status_code, warnings = check_website_status(WEBSITE_URL) - log_result( - status_code=status_code, - warnings=warnings, - method="send-preprocess", - bundleid=bundleid, - lens=lens, - pid=pid, - ) - time.sleep(1) - - return 1 - - -def chek_all_lenses_data(BUNDLES, PATIENT_IDS, BASE_URL): - for bundleid in BUNDLES: - for pid in PATIENT_IDS: - WEBSITE_URL = ( - BASE_URL - + "focusing/focus/" - + bundleid["id"] - + "?preprocessors=preprocessing-service-manual&patientIdentifier=" - + pid - ) - print(WEBSITE_URL) - # WEBSITE_URL = WEBSITE_DATA["url"] - # WEBSITE_DESC = WEBSITE_DATA["desc"] - status_code, warnings = check_website_status(WEBSITE_URL) - log_result( - status_code=status_code, - warnings=warnings, - method="alllenses", - bundleid=bundleid, - lens="all", - pid=pid, - ) - time.sleep(1) - - return 1 - - -def chek_all_preprocess_data(BUNDLES, PATIENT_IDS, BASE_URL): - for bundleid in BUNDLES: - for pid in PATIENT_IDS: - WEBSITE_URL = ( - BASE_URL - + "focusing/focus/" - + bundleid["id"] - + "?preprocessors=preprocessing-service-mvp2&preprocessors=preprocessing-service-manual&patientIdentifier=" - + pid - ) - print(WEBSITE_URL) - # WEBSITE_URL = WEBSITE_DATA["url"] - # WEBSITE_DESC = WEBSITE_DATA["desc"] - status_code, warnings = check_website_status(WEBSITE_URL) - log_result( - status_code=status_code, - warnings=warnings, - method="allpreprocess", - 
bundleid=bundleid, - lens="all", - pid=pid, - ) - time.sleep(1) - - return 1 - - -def check_bundles_in_list(BASE_URL): - ENCHACED_WHITE_LIST = [ - "enhanced-bundlebik-alicia", - "enhanced-bundlekarveacalcium-alicia", - "enchanced-bundledovato-es", - "enchanced-bundledovato-en", - "enhanced-bundleflucelvax-alicia", - "enhanced-bundlehypericum-alicia", - "enhancedbundlekarvea-alicia", - "enhancedddbundlekarvea", - "enhanced-bundlebik-pedro", - "enhanced-bundlekarveacalcium-pedro", - "enchanced-bundledovato-pedro-en", - "enchanced-bundledovato-pedro-es", - "enhanced-bundleflucelvax-pedro", - "enhanced-bundlehypericum-pedro", - "enhancedbundlekarveaP", - ] - WEBSITE_URL = BASE_URL + "epi/api/fhir/Bundle" - print(WEBSITE_URL) - - next_url = WEBSITE_URL # Start with the initial URL - - while next_url: # Loop through bundles while 'next' link exists - response = requests.get(next_url) - - if response.status_code != 200: - print(f"Failed to fetch data: {response.status_code}") - break - - # Parse the JSON response - data = response.json() - - # Process entries in the current bundle - if "entry" in data: - for entry in data["entry"]: - bid = entry["resource"]["id"] - if bid in ENCHACED_WHITE_LIST: - continue - print(f"Processing bundle ID: {bid}") - - # Check the 'List' resource for the current bundle ID - nresponse = requests.get(f"{BASE_URL}epi/api/fhir/List?item={bid}") - - if nresponse.status_code != 200: - print(f"Failed to check List resource: {nresponse.status_code}") - continue - - if nresponse.json().get("total", 0) > 0: - value = 0 # If the resource is found - else: - value = 1 # If the resource is not found - - metric_path = f"gh.listmember.{bid}" - timestamp = int(time.time()) - - message = f"{metric_path} {value} {timestamp}\n" - # print(f"Sending to Graphite: {message}", end="") - print(message) - - # Open a socket to Graphite and send the data - # Uncomment and modify if sending to Graphite - # with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: - # sock.connect((GRAPHITE_HOST, int(GRAPHITE_PORT))) - # sock.sendall(message.encode("utf-8")) - - # Check for the next link to paginate - next_url = None - for link in data.get("link", []): - if link["relation"] == "next": - next_url = link["url"] - print(f"Fetching next page: {next_url}") - break # We only need the first 'next' link - print("end") - return 1 - - -def chek_all_prpcessor_with_post_data(BUNDLES, PATIENT_IDS, BASE_URL): - for bundleid in BUNDLES: - bundleresp = requests.get(BASE_URL + "/Bundle/" + bundleid["id"]) - bundle = bundleresp.json() - for pid in PATIENT_IDS: - patresp_body = { - "resourceType": "Parameters", - "id": "example", - "parameter": [ - {"name": "identifier", "valueIdentifier": {"value": pid}} - ], - } - patresp = requests.post(BASE_URL + "/Patient/$summary", body=patresp_body) - ips = patresp.json() - # print(ips) - body = {"epi": bundle, "ips": ips} - WEBSITE_URL = ( - BASE_URL - + "focusing/focus?preprocessors=preprocessing-service-mvp2&preprocessors=preprocessing-service-manual" - ) - print(WEBSITE_URL) - - # WEBSITE_URL = WEBSITE_DATA["url"] - # WEBSITE_DESC = WEBSITE_DATA["desc"] - status_code, warnings = check_website_status(WEBSITE_URL, body) - log_result( - status_code=status_code, - warnings=warnings, - method="allpreprocesspost", - bundleid=bundleid, - lens="all", - pid=pid, - ) - time.sleep(1) - - return 1 - - -def check_website_status(url, body=None): - """ - Checks the status code of a website. 
- """ - # print(body) - - with open("data.json", "w") as json_file: - json.dump(body, json_file, indent=4) # indent=4 makes the output more readable - - try: - if not body: - response = requests.post(url) - else: - # print(body) - response = requests.post( - url, - json=body, - headers={ - "content-type": "application/json", - "Accept": "application/json", - }, - ) - # print(response.status_code) - # print(response.json()) - focusing_warnings = response.headers.get("gh-focusing-warnings") - # print(focusing_warnings) - # print(response.text) - - if response.status_code == 400: - logger.debug(f"Warning: {response.status_code} and {response.text}") - return response.status_code, {} - - elif focusing_warnings: - # print(response.text) - # print(focusing_warnings) - return response.status_code, eval(focusing_warnings) - else: - # print(response.status_code) - - return response.status_code, {} - except requests.RequestException as e: - print(f"Error checking website status: {e}") - return None - - -def main(): - while True: - try: - chek_preprocessor_data(BUNDLES, LENSES, PATIENT_IDS, BASE_URL) - except Exception as err: - logger.debug(f"Error on function chek_preprocessor_data -> {err}") - time.sleep(10) - try: - chek_all_lenses_data(BUNDLES, PATIENT_IDS, BASE_URL) - except Exception as err: - logger.debug(f"Error on function chek_all_lenses_data -> {err}") - - time.sleep(10) - try: - chek_all_preprocess_data(BUNDLES, PATIENT_IDS, BASE_URL) - except Exception as err: - logger.debug(f"Error on function chek_all_preprocess_data -> {err}") - - time.sleep(10) - try: - chek_all_prpcessor_with_post_data(BUNDLES, PATIENT_IDS, BASE_URL) - except Exception as err: - logger.debug( - f"Error on function chek_all_prpcessor_with_post_data -> {err}" - ) - - time.sleep(10) - try: - chek_lenses_foralreadypreprocess_data( - PREPROCBUNDLES, LENSES, PATIENT_IDS, BASE_URL - ) - except Exception as err: - logger.debug( - f"Error on function chek_lenses_foralreadypreprocess_data -> {err}" - ) - - time.sleep(10) - try: - check_bundles_in_list(BASE_URL) - except Exception as err: - logger.debug(f"Error on function check_bundles_in_list -> {err}") - - time.sleep(3600) - - -if __name__ == "__main__": - main() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..4091e16 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +Flask==3.1.0 +requests==2.32.3 +requests-futures==1.0.2