From 6b945db66a75a9079c0d669b540ad65980327c62 Mon Sep 17 00:00:00 2001 From: "Tsai, Louie" Date: Sat, 16 Nov 2024 00:30:11 -0800 Subject: [PATCH] Return Mega/Micro Service Version number at runtime Signed-off-by: Tsai, Louie fix Signed-off-by: Tsai, Louie --- README.md | 16 ++++++++++++++++ comps/cores/mega/http_service.py | 4 +++- comps/llms/text-generation/README.md | 8 +++++++- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f8c2557849..6c83759025 100644 --- a/README.md +++ b/README.md @@ -139,6 +139,22 @@ from comps import ChatQnAGateway self.gateway = ChatQnAGateway(megaservice=self.megaservice, host="0.0.0.0", port=self.port) ``` +## Check Mega/Micro Service health status and version number + +Use the command below to check the Mega/Micro Service status. + +```bash +curl http://${your_ip}:${service_port}/v1/health_check\ + -X GET \ + -H 'Content-Type: application/json' +``` + +Users should get output like the example below if the Mega/Micro Service works correctly. + +```bash +{"Service Title":"ChatQnAGateway/MicroService","Version":"1.0","Service Description":"OPEA Microservice Infrastructure"} +``` + ## Contributing to OPEA Welcome to the OPEA open-source community! We are thrilled to have you here and excited about the potential contributions you can bring to the OPEA platform. Whether you are fixing bugs, adding new GenAI components, improving documentation, or sharing your unique use cases, your contributions are invaluable. 
diff --git a/comps/cores/mega/http_service.py b/comps/cores/mega/http_service.py index 283540f493..6225e8eaa4 100644 --- a/comps/cores/mega/http_service.py +++ b/comps/cores/mega/http_service.py @@ -69,7 +69,9 @@ def _create_app(self): ) async def _health_check(): """Get the health status of this GenAI microservice.""" - return {"Service Title": self.title, "Service Description": self.description} + from comps.version import __version__ + + return {"Service Title": self.title, "Version": __version__, "Service Description": self.description} @app.get( path="/v1/statistics", diff --git a/comps/llms/text-generation/README.md b/comps/llms/text-generation/README.md index 824d8a2277..ea2ed61c4c 100644 --- a/comps/llms/text-generation/README.md +++ b/comps/llms/text-generation/README.md @@ -257,7 +257,7 @@ docker compose -f docker_compose_llm.yaml up -d ## 🚀3. Consume LLM Service -### 3.1 Check Service Status +### 3.1 Check Service Status and its Version ```bash curl http://${your_ip}:9000/v1/health_check\ -X GET \ -H 'Content-Type: application/json' ``` @@ -265,6 +265,12 @@ curl http://${your_ip}:9000/v1/health_check\ -X GET \ -H 'Content-Type: application/json' ``` +Users should get output like the example below if the MicroService works correctly. + +```bash +{"Service Title":"ChatQnAGateway/MicroService","Version":"1.0","Service Description":"OPEA Microservice Infrastructure"} +``` + ### 3.2 Verify the LLM Service #### 3.2.1 Verify the TGI Service