Skip to content

Commit

Permalink
Use default offline/openai chat model to extract DB search queries
Browse files Browse the repository at this point in the history
Make usage of the first offline/openai chat model as the default LLM
to use for background tasks more explicit

The idea is to use the default/first chat model for all background
activities, like extracting search queries to perform from the user message.
This is controlled by the server admin.

The chat model set by the user is used for user-facing functions, like
generating chat responses.
  • Loading branch information
debanjum committed Jan 3, 2024
1 parent e28adf2 commit 4a234c8
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 5 deletions.
4 changes: 2 additions & 2 deletions src/khoj/database/adapters/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -382,7 +382,7 @@ async def ahas_offline_chat():
return await OfflineChatProcessorConversationConfig.objects.filter(enabled=True).aexists()

@staticmethod
async def get_offline_chat():
async def get_default_offline_llm():
return await ChatModelOptions.objects.filter(model_type="offline").afirst()

@staticmethod
Expand All @@ -397,7 +397,7 @@ async def has_openai_chat():
return await OpenAIProcessorConversationConfig.objects.filter().aexists()

@staticmethod
async def get_openai_chat():
async def get_default_openai_llm():
return await ChatModelOptions.objects.filter(model_type="openai").afirst()

@staticmethod
Expand Down
7 changes: 4 additions & 3 deletions src/khoj/routers/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -853,8 +853,8 @@ async def extract_references_and_questions(
and conversation_config.model_type == ChatModelOptions.ModelType.OFFLINE
):
using_offline_chat = True
offline_chat = await ConversationAdapters.get_offline_chat()
chat_model = offline_chat.chat_model
default_offline_llm = await ConversationAdapters.get_default_offline_llm()
chat_model = default_offline_llm.chat_model
if state.gpt4all_processor_config is None:
state.gpt4all_processor_config = GPT4AllProcessorModel(chat_model=chat_model)

Expand All @@ -865,8 +865,9 @@ async def extract_references_and_questions(
)
elif conversation_config and conversation_config.model_type == ChatModelOptions.ModelType.OPENAI:
openai_chat_config = await ConversationAdapters.get_openai_chat_config()
default_openai_llm = await ConversationAdapters.get_default_openai_llm()
api_key = openai_chat_config.api_key
chat_model = conversation_config.chat_model
chat_model = default_openai_llm.chat_model
inferred_queries = extract_questions(
defiltered_query, model=chat_model, api_key=api_key, conversation_log=meta_log
)
Expand Down

0 comments on commit 4a234c8

Please sign in to comment.