From 895f1c8e9e36261635f405f7421310e847c0f879 Mon Sep 17 00:00:00 2001
From: sabaimran
Date: Tue, 3 Sep 2024 13:16:34 -0700
Subject: [PATCH] Gracefully close thread when there's an exception in the
 anthropic llm thread. Include full stack traces.

---
 .../processor/conversation/anthropic/utils.py | 49 ++++++++++---------
 .../processor/conversation/openai/utils.py    |  2 +-
 2 files changed, 27 insertions(+), 24 deletions(-)

diff --git a/src/khoj/processor/conversation/anthropic/utils.py b/src/khoj/processor/conversation/anthropic/utils.py
index dbae1e118..79ccac4ef 100644
--- a/src/khoj/processor/conversation/anthropic/utils.py
+++ b/src/khoj/processor/conversation/anthropic/utils.py
@@ -89,26 +89,29 @@ def anthropic_chat_completion_with_backoff(
 def anthropic_llm_thread(
     g, messages, system_prompt, model_name, temperature, api_key, max_prompt_size=None, model_kwargs=None
 ):
-    if api_key not in anthropic_clients:
-        client: anthropic.Anthropic = anthropic.Anthropic(api_key=api_key)
-        anthropic_clients[api_key] = client
-    else:
-        client: anthropic.Anthropic = anthropic_clients[api_key]
-
-    formatted_messages: List[anthropic.types.MessageParam] = [
-        anthropic.types.MessageParam(role=message.role, content=message.content) for message in messages
-    ]
-
-    with client.messages.stream(
-        messages=formatted_messages,
-        model=model_name,  # type: ignore
-        temperature=temperature,
-        system=system_prompt,
-        timeout=20,
-        max_tokens=DEFAULT_MAX_TOKENS_ANTHROPIC,
-        **(model_kwargs or dict()),
-    ) as stream:
-        for text in stream.text_stream:
-            g.send(text)
-
-    g.close()
+    try:
+        if api_key not in anthropic_clients:
+            client: anthropic.Anthropic = anthropic.Anthropic(api_key=api_key)
+            anthropic_clients[api_key] = client
+        else:
+            client: anthropic.Anthropic = anthropic_clients[api_key]
+
+        formatted_messages: List[anthropic.types.MessageParam] = [
+            anthropic.types.MessageParam(role=message.role, content=message.content) for message in messages
+        ]
+
+        with client.messages.stream(
+            messages=formatted_messages,
+            model=model_name,  # type: ignore
+            temperature=temperature,
+            system=system_prompt,
+            timeout=20,
+            max_tokens=DEFAULT_MAX_TOKENS_ANTHROPIC,
+            **(model_kwargs or dict()),
+        ) as stream:
+            for text in stream.text_stream:
+                g.send(text)
+    except Exception as e:
+        logger.error(f"Error in anthropic_llm_thread: {e}", exc_info=True)
+    finally:
+        g.close()

diff --git a/src/khoj/processor/conversation/openai/utils.py b/src/khoj/processor/conversation/openai/utils.py
index b601cbbd9..1a42113ee 100644
--- a/src/khoj/processor/conversation/openai/utils.py
+++ b/src/khoj/processor/conversation/openai/utils.py
@@ -131,6 +131,6 @@ def llm_thread(g, messages, model_name, temperature, openai_api_key=None, api_ba
         elif delta_chunk.content:
             g.send(delta_chunk.content)
     except Exception as e:
-        logger.error(f"Error in llm_thread: {e}")
+        logger.error(f"Error in llm_thread: {e}", exc_info=True)
     finally:
         g.close()
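
Reviewer note: the diff only shows the producer side; the `g` object these
llm threads write to is not defined in these hunks. Below is a minimal,
hypothetical sketch of the queue-backed ThreadedGenerator-style object such
threads typically feed (names and structure are illustrative assumptions,
not khoj's actual implementation). It shows why moving `g.close()` into a
`finally` block matters: if the worker thread raises and exits without
closing the generator, the consumer blocks on `queue.get()` indefinitely.

import logging
import queue
import threading

logger = logging.getLogger(__name__)


class ThreadedGenerator:
    # Hypothetical stand-in for the `g` object the patched llm threads
    # write to: a queue-backed iterator fed by a worker thread.
    _DONE = object()  # sentinel marking end of stream

    def __init__(self):
        self.queue = queue.Queue()

    def __iter__(self):
        return self

    def __next__(self):
        item = self.queue.get()  # blocks until the worker sends or closes
        if item is self._DONE:
            raise StopIteration
        return item

    def send(self, data):
        self.queue.put(data)

    def close(self):
        self.queue.put(self._DONE)


def demo_llm_thread(g, chunks):
    # Mirrors the patched control flow: stream inside try, log the full
    # traceback on failure, and always close the generator in finally so
    # the consumer is released even when streaming raises midway.
    try:
        for text in chunks:
            g.send(text)
    except Exception as e:
        logger.error(f"Error in demo_llm_thread: {e}", exc_info=True)
    finally:
        g.close()


if __name__ == "__main__":
    g = ThreadedGenerator()
    threading.Thread(target=demo_llm_thread, args=(g, ["Hello", ", ", "world"])).start()
    for chunk in g:  # drains chunks as the worker produces them
        print(chunk, end="")
    print()

Without the finally, an exception between stream start and g.close() would
leave the consuming handler waiting on the queue forever; with it, iteration
simply ends and the error surfaces in the logs with a full stack trace.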