Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Common: Improve langchain message converters #65

Merged
merged 5 commits into from
Feb 27, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions app/common/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,5 @@
from common.singleton import Singleton
from common.message_converters import (
convert_iris_message_to_langchain_message,
convert_langchain_message_to_iris_message,
)
29 changes: 29 additions & 0 deletions app/common/message_converters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
from langchain_core.messages import BaseMessage

from domain import IrisMessage, IrisMessageRole


def convert_iris_message_to_langchain_message(iris_message: IrisMessage) -> BaseMessage:
    """Convert an IrisMessage into a langchain BaseMessage.

    The Iris role is mapped onto the corresponding langchain message type
    string ("human" / "ai" / "system").

    Raises:
        ValueError: if the Iris message carries a role with no langchain equivalent.
    """
    role_to_type = {
        IrisMessageRole.USER: "human",
        IrisMessageRole.ASSISTANT: "ai",
        IrisMessageRole.SYSTEM: "system",
    }
    message_type = role_to_type.get(iris_message.role)
    if message_type is None:
        raise ValueError(f"Unknown message role: {iris_message.role}")
    return BaseMessage(content=iris_message.text, type=message_type)


def convert_langchain_message_to_iris_message(base_message: BaseMessage) -> IrisMessage:
    """Convert a langchain BaseMessage into an IrisMessage.

    The langchain message type string ("human" / "ai" / "system") is mapped
    onto the corresponding IrisMessageRole.

    Raises:
        ValueError: if the langchain message type has no Iris equivalent.
    """
    type_to_role = {
        "human": IrisMessageRole.USER,
        "ai": IrisMessageRole.ASSISTANT,
        "system": IrisMessageRole.SYSTEM,
    }
    if base_message.type not in type_to_role:
        raise ValueError(f"Unknown message type: {base_message.type}")
    return IrisMessage(text=base_message.content, role=type_to_role[base_message.type])
4 changes: 2 additions & 2 deletions app/llm/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
from llm.request_handler_interface import RequestHandler
from llm.completion_arguments import *
from llm.basic_request_handler import BasicRequestHandler, DefaultModelId
from llm.external import *
from llm.request_handler import *
29 changes: 6 additions & 23 deletions app/llm/langchain/iris_langchain_chat_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,30 +8,13 @@
from langchain_core.outputs import ChatResult
from langchain_core.outputs.chat_generation import ChatGeneration

from domain import IrisMessage, IrisMessageRole
from common import (
convert_iris_message_to_langchain_message,
convert_langchain_message_to_iris_message,
)
from llm import RequestHandler, CompletionArguments


def convert_iris_message_to_base_message(iris_message: IrisMessage) -> BaseMessage:
role_map = {
IrisMessageRole.USER: "human",
IrisMessageRole.ASSISTANT: "ai",
IrisMessageRole.SYSTEM: "system",
}
return BaseMessage(content=iris_message.text, type=role_map[iris_message.role])


def convert_base_message_to_iris_message(base_message: BaseMessage) -> IrisMessage:
role_map = {
"human": IrisMessageRole.USER,
"ai": IrisMessageRole.ASSISTANT,
"system": IrisMessageRole.SYSTEM,
}
return IrisMessage(
text=base_message.content, role=IrisMessageRole(role_map[base_message.type])
)


class IrisLangchainChatModel(BaseChatModel):
"""Custom langchain chat model for our own request handler"""

Expand All @@ -47,11 +30,11 @@ def _generate(
run_manager: Optional[CallbackManagerForLLMRun] = None,
**kwargs: Any
) -> ChatResult:
iris_messages = [convert_base_message_to_iris_message(m) for m in messages]
iris_messages = [convert_langchain_message_to_iris_message(m) for m in messages]
iris_message = self.request_handler.chat(
iris_messages, CompletionArguments(stop=stop)
)
base_message = convert_iris_message_to_base_message(iris_message)
base_message = convert_iris_message_to_langchain_message(iris_message)
chat_generation = ChatGeneration(message=base_message)
return ChatResult(generations=[chat_generation])

Expand Down
2 changes: 2 additions & 0 deletions app/llm/request_handler/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Re-export the package's public request-handler names.
# NOTE: these must be absolute imports — `from basic_request_handler import ...`
# is an implicit relative import, which Python 3 rejects inside a package.
# The package path `llm.request_handler` matches how app/llm/__init__.py
# imports this package (`from llm.request_handler import *`).
from llm.request_handler.basic_request_handler import BasicRequestHandler
from llm.request_handler.request_handler_interface import RequestHandler
Loading