Skip to content

Commit

Permalink
Fix restful api for megaservice gateway (opea-project#52)
Browse files Browse the repository at this point in the history
Signed-off-by: Spycsh <[email protected]>
Signed-off-by: lvliang-intel <[email protected]>
  • Loading branch information
lvliang-intel authored May 13, 2024
1 parent 7510502 commit ca18b19
Showing 1 changed file with 23 additions and 10 deletions.
33 changes: 23 additions & 10 deletions comps/cores/mega/orchestrator.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,13 +61,23 @@ def __init__(self, host="0.0.0.0", port=8000, endpoint=MegaServiceEndpoint.CHAT_

def define_routes(self):
    """Register the gateway's HTTP API routes.

    Routes registered (paths come from the MegaServiceEndpoint enum):
      * POST self.endpoint            -> self.endpoint_func (main service entry)
      * GET  LIST_SERVICE endpoint    -> self.list_service
      * GET  LIST_PARAMETERS endpoint -> self.list_parameter
    """
    # NOTE(review): the diff text interleaved removed and added lines, leaving
    # LIST_SERVICE/LIST_PARAMETERS each registered twice with conflicting
    # handlers; each route is registered exactly once with its own handler.
    self.gateway.app.router.add_api_route(self.endpoint, self.endpoint_func, methods=["POST"])
    self.gateway.app.router.add_api_route(str(MegaServiceEndpoint.LIST_SERVICE), self.list_service, methods=["GET"])
    self.gateway.app.router.add_api_route(str(MegaServiceEndpoint.LIST_PARAMETERS), self.list_parameter, methods=["GET"])

def start_server(self):
    """Start serving requests via the underlying gateway."""
    gateway = self.gateway
    gateway.start()

def list_service(self):
    """Return a mapping of {service description: endpoint path} for all leaf nodes.

    Iterates every leaf node of the service graph (via self.all_leaves()) and
    collects its description and endpoint path from self.services.

    Returns:
        dict: description -> endpoint_path, one entry per leaf node.
    """
    # Bug fix: the original rebound `response` to a fresh one-entry dict on
    # every iteration, so only the LAST leaf survived; accumulate instead.
    response = {}
    for node in self.all_leaves():
        response[self.services[node].description] = self.services[node].endpoint_path
    return response

def list_parameter(self):
    """Placeholder handler for the LIST_PARAMETERS GET route.

    TODO: not yet implemented; currently returns None.
    """
    pass

async def handle_chat_qna(self, request: Request):
data = await request.json()
chat_request = ChatCompletionRequest.parse_obj(data)
Expand All @@ -78,14 +88,15 @@ async def handle_chat_qna(self, request: Request):
text_list = [item["text"] for item in message["content"] if item["type"] == "text"]
prompt = "\n".join(text_list)
self.schedule(initial_inputs={"text": prompt})
response = self.get_all_final_outputs()
last_node = self.all_leaves()[-1]
response = self.result_dict[last_node]["text"]
choices = []
usage = UsageInfo()
choices.append(
ChatCompletionResponseChoice(
index=0,
message=ChatMessage(role="assistant", content=response),
finish_reason=response.get("finish_reason", "stop"),
finish_reason="stop",
)
)
return ChatCompletionResponse(model="chatqna", choices=choices, usage=usage)
Expand All @@ -108,14 +119,15 @@ async def handle_code_gen(self, request: Request):
text_list = [item["text"] for item in message["content"] if item["type"] == "text"]
prompt = "\n".join(text_list)
self.schedule(initial_inputs={"text": prompt})
response = self.get_all_final_outputs()
last_node = self.all_leaves()[-1]
response = self.result_dict[last_node]["text"]
choices = []
usage = UsageInfo()
choices.append(
ChatCompletionResponseChoice(
index=0,
message=ChatMessage(role="assistant", content=response),
finish_reason=response.get("finish_reason", "stop"),
finish_reason="stop",
)
)
return ChatCompletionResponse(model="chatqna", choices=choices, usage=usage)
Expand All @@ -130,14 +142,15 @@ async def handle_code_trans(self, request: Request):
text_list = [item["text"] for item in message["content"] if item["type"] == "text"]
prompt = "\n".join(text_list)
self.schedule(initial_inputs={"text": prompt})
response = self.get_all_final_outputs()
last_node = self.all_leaves()[-1]
response = self.result_dict[last_node]["text"]
choices = []
usage = UsageInfo()
choices.append(
ChatCompletionResponseChoice(
index=0,
message=ChatMessage(role="assistant", content=response),
finish_reason=response.get("finish_reason", "stop"),
finish_reason="stop",
)
)
return ChatCompletionResponse(model="chatqna", choices=choices, usage=usage)
Expand Down

0 comments on commit ca18b19

Please sign in to comment.