Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 25 changed files with 1,473 additions and 217 deletions.
The first file shown is a Dockerfile; the hunk adds one line alongside the exposed ports:

```diff
@@ -55,5 +55,6 @@ RUN apt-get clean && apt-get purge
 
 USER mitodl
 
 EXPOSE 8888
 EXPOSE 8001
+ENV PORT 8001
```
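Setting `ENV PORT 8001` lets the application bind to the same port the image exposes. This commit does not show the server setup, but a minimal sketch of how an entrypoint might consume the variable (the default value here just mirrors the Dockerfile) looks like this:

```python
import os

# The Dockerfile sets ENV PORT 8001; a typical entrypoint reads it back,
# defaulting to the same value so the process also works outside the container.
port = int(os.environ.get("PORT", "8001"))
print(f"Binding application server to 0.0.0.0:{port}")
```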
File renamed without changes.
The next file is new (49 lines added): a small module of helpers that select the LLM and agent classes for the configured AI provider. The first helper, `get_llm`, instantiates the configured LLM, routing through a proxy when one is supplied:

```python
"""AI-specific functions for ai_agents."""

from typing import Optional

from django.conf import settings
from llama_index.core.agent import AgentRunner
from llama_index.core.llms.llm import LLM

from ai_chatbots.constants import AgentClassEnum, LLMClassEnum
from ai_chatbots.proxies import AIProxy


def get_llm(model_name: Optional[str] = None, proxy: Optional[AIProxy] = None) -> LLM:
    """
    Get the LLM from the given model name, incorporating a proxy if passed.

    Args:
        model_name: The name of the model
        proxy: The proxy to use

    Returns:
        The LLM
    """
    if not model_name:
        model_name = settings.AI_MODEL
    try:
        llm_class = LLMClassEnum[settings.AI_PROVIDER].value
        return llm_class(
            model=model_name,
            **(proxy.get_api_kwargs() if proxy else {}),
            additional_kwargs=(proxy.get_additional_kwargs() if proxy else {}),
        )
    except KeyError as ke:
        msg = f"{settings.AI_PROVIDER} not supported"
        raise NotImplementedError(msg) from ke
    except Exception as ex:
        msg = f"Error instantiating LLM: {model_name}"
        raise ValueError(msg) from ex
```
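`LLMClassEnum` and `AgentClassEnum` are imported from `ai_chatbots/constants.py`, which is not part of this diff. A minimal sketch of the shape `get_llm` expects — enum members whose values are classes, keyed by provider name — assuming OpenAI as the only wired-up provider (the member names and classes here are guesses, not the commit's actual constants):

```python
from enum import Enum

from llama_index.agent.openai import OpenAIAgent
from llama_index.llms.openai import OpenAI


class LLMClassEnum(Enum):
    # Hypothetical member: maps an AI_PROVIDER value to an LLM class.
    openai = OpenAI


class AgentClassEnum(Enum):
    # Hypothetical member: maps an AI_PROVIDER value to an AgentRunner subclass.
    openai = OpenAIAgent
```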
The second helper, `get_agent`, looks up the agent class for the configured provider the same way:

```python
def get_agent() -> AgentRunner:
    """Get the appropriate chatbot agent for the AI provider"""
    try:
        return AgentClassEnum[settings.AI_PROVIDER].value
    except KeyError as ke:
        msg = f"{settings.AI_PROVIDER} not supported"
        raise NotImplementedError(msg) from ke
```
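Note that `get_agent` returns the enum's value — the agent class itself, not an instance — so the `-> AgentRunner` annotation is loose, and callers presumably instantiate it. A hedged usage sketch, assuming the module's import path and that the agent class offers the common `from_tools` constructor (both assumptions; neither is shown in the commit):

```python
from ai_chatbots.ai import get_agent, get_llm  # module path assumed

llm = get_llm()            # falls back to settings.AI_MODEL when no name is given
agent_class = get_agent()  # a class, not an instance
agent = agent_class.from_tools(tools=[], llm=llm)  # from_tools is assumed here
response = agent.chat("Hello")
```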
In the Django app config (`AiChatConfig`), the app's `name` changes from the old package to the new one:

```diff
@@ -6,4 +6,4 @@
 class AiChatConfig(AppConfig):
     """AI Chat Appconfig"""
 
-    name = "ai_agents"
+    name = "ai_chatbots"
```