fix: make llama.cpp Chat Generator compatible with new ChatMessage (#1254)
@@ -2,7 +2,7 @@
 from typing import Any, Dict, List, Optional

 from haystack import component
-from haystack.dataclasses import ChatMessage, ChatRole
+from haystack.dataclasses import ChatMessage
 from llama_cpp import Llama
 from llama_cpp.llama_tokenizer import LlamaHFTokenizer


@@ -21,6 +21,10 @@ def _convert_message_to_llamacpp_format(message: ChatMessage) -> Dict[str, str]:
     if message.name:
         formatted_msg["name"] = message.name

+    if formatted_msg["role"] == "tool":
+        formatted_msg["name"] = message.tool_call_result.origin.tool_name
+        formatted_msg["content"] = message.tool_call_result.result
+
     return formatted_msg


@@ -114,26 +118,31 @@ def run(self, messages: List[ChatMessage], generation_kwargs: Optional[Dict[str,
         formatted_messages = [_convert_message_to_llamacpp_format(msg) for msg in messages]

         response = self.model.create_chat_completion(messages=formatted_messages, **updated_generation_kwargs)
-        replies = [
-            ChatMessage(
-                content=choice["message"]["content"],
-                role=ChatRole[choice["message"]["role"].upper()],
-                name=None,
-                meta={
-                    "response_id": response["id"],
-                    "model": response["model"],
-                    "created": response["created"],
-                    "index": choice["index"],
-                    "finish_reason": choice["finish_reason"],
-                    "usage": response["usage"],
-                },
-            )
-            for choice in response["choices"]
-        ]
-
-        for reply, choice in zip(replies, response["choices"]):
-            tool_calls = choice.get("message", {}).get("tool_calls", [])
-            if tool_calls:
-                reply.meta["tool_calls"] = tool_calls
-                reply.name = tool_calls[0]["function"]["name"] if tool_calls else None
+
+        replies = []
+
+        for choice in response["choices"]:
+            meta = {
+                "response_id": response["id"],
+                "model": response["model"],
+                "created": response["created"],
+                "index": choice["index"],
+                "finish_reason": choice["finish_reason"],
+                "usage": response["usage"],
+            }
+
+            name = None
+            tool_calls = choice.get("message", {}).get("tool_calls", [])
+            if tool_calls:
+                meta["tool_calls"] = tool_calls
+                name = tool_calls[0]["function"]["name"]
+
+            reply = ChatMessage.from_assistant(choice["message"]["content"], meta=meta)
+            if name:
+                if hasattr(reply, "_name"):
+                    reply._name = name  # new ChatMessage
+                elif hasattr(reply, "name"):
+                    reply.name = name  # legacy ChatMessage
+
+            replies.append(reply)

         return {"replies": replies}

Review thread on lines +141 to +145 (the hasattr check):

Reviewer: once we release 2.9.0, we can get rid of this ugly check.

Reviewer: 👍 after 2.9.0 release, we may consider refactoring this.
Review comment: starting from 2.9.0, Haystack "function" messages will be automatically converted to "tool" messages. This change ensures compatibility.
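To make that concrete, here is a hedged sketch of a tool-result message flowing through _convert_message_to_llamacpp_format. ToolCall and ChatMessage.from_tool follow the Haystack 2.9 tooling dataclasses as I understand them, so treat the exact signatures as assumptions; only the tool_call_result.origin.tool_name and tool_call_result.result accesses are confirmed by the diff.

    from haystack.dataclasses import ChatMessage, ToolCall

    # A tool-result message in the new (2.9-style) API; from_tool's
    # signature is an assumption, not shown in this PR:
    call = ToolCall(tool_name="get_weather", arguments={"city": "Berlin"})
    msg = ChatMessage.from_tool(tool_result="sunny, 21 °C", origin=call)

    # The new "tool" branch in the converter turns this into the dict that
    # llama.cpp's create_chat_completion expects:
    #     {"role": "tool", "name": "get_weather", "content": "sunny, 21 °C"}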