-
Notifications
You must be signed in to change notification settings - Fork 15.9k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
community: Implement bind_tools
for ChatTongyi
#20725
Changes from 6 commits
6c3fec9
f22c071
ed51e39
cc76a34
311ea1c
8083b68
a806c20
6fcbe73
57550d7
31f7d5c
c1ecd42
b1343ba
f6326af
01f8156
497e314
6b2f224
372f404
0d15e43
ff9f771
336f3b8
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -12,6 +12,8 @@ | |
List, | ||
Mapping, | ||
Optional, | ||
Sequence, | ||
Type, | ||
Union, | ||
cast, | ||
) | ||
|
@@ -20,6 +22,7 @@ | |
AsyncCallbackManagerForLLMRun, | ||
CallbackManagerForLLMRun, | ||
) | ||
from langchain_core.language_models import LanguageModelInput | ||
from langchain_core.language_models.chat_models import BaseChatModel | ||
from langchain_core.messages import ( | ||
AIMessage, | ||
|
@@ -32,6 +35,8 @@ | |
HumanMessageChunk, | ||
SystemMessage, | ||
SystemMessageChunk, | ||
ToolMessage, | ||
ToolMessageChunk, | ||
) | ||
from langchain_core.output_parsers.openai_tools import ( | ||
make_invalid_tool_call, | ||
|
@@ -42,8 +47,11 @@ | |
ChatGenerationChunk, | ||
ChatResult, | ||
) | ||
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator | ||
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator | ||
from langchain_core.runnables import Runnable | ||
from langchain_core.tools import BaseTool | ||
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env | ||
from langchain_core.utils.function_calling import convert_to_openai_tool | ||
from requests.exceptions import HTTPError | ||
from tenacity import ( | ||
before_sleep_log, | ||
|
@@ -88,8 +96,14 @@ def convert_dict_to_message( | |
) | ||
else: | ||
additional_kwargs = {} | ||
|
||
return ( | ||
AIMessageChunk(content=content) | ||
AIMessageChunk( | ||
content=content, | ||
additional_kwargs=additional_kwargs, | ||
tool_calls=tool_calls, | ||
invalid_tool_calls=invalid_tool_calls, | ||
) | ||
if is_chunk | ||
else AIMessage( | ||
content=content, | ||
|
@@ -104,6 +118,23 @@ def convert_dict_to_message( | |
if is_chunk | ||
else SystemMessage(content=content) | ||
) | ||
elif role == "tool": | ||
additional_kwargs = {} | ||
if "name" in _dict: | ||
additional_kwargs["name"] = _dict["name"] | ||
return ( | ||
ToolMessageChunk( | ||
content=_dict.get("content", ""), | ||
tool_call_id=_dict.get("tool_call_id"), | ||
additional_kwargs=additional_kwargs, | ||
) | ||
if is_chunk | ||
else ToolMessage( | ||
content=_dict.get("content", ""), | ||
tool_call_id=_dict.get("tool_call_id"), | ||
additional_kwargs=additional_kwargs, | ||
) | ||
) | ||
else: | ||
return ( | ||
ChatMessageChunk(role=role, content=content) | ||
|
@@ -117,11 +148,25 @@ def convert_message_chunk_to_message(message_chunk: BaseMessageChunk) -> BaseMes | |
if isinstance(message_chunk, HumanMessageChunk): | ||
return HumanMessage(content=message_chunk.content) | ||
elif isinstance(message_chunk, AIMessageChunk): | ||
return AIMessage(content=message_chunk.content) | ||
# assert message_chunk is None | ||
return ( | ||
AIMessage( | ||
content=message_chunk.content, | ||
tool_calls=message_chunk.additional_kwargs["tool_calls"], | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. this is confusing, because at this point, is the following true? if message_chunk.additional_kwargs["tool_calls"]:
item = message_chunk.additional_kwargs["tool_calls"][0]
assert isinstance(item["args"], dict) # not a string There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Sorry for the delayed response, having a really tough time with my job 😫 |
||
) | ||
if "tool_calls" in message_chunk.additional_kwargs | ||
else AIMessage(content=message_chunk.content) | ||
) | ||
elif isinstance(message_chunk, SystemMessageChunk): | ||
return SystemMessage(content=message_chunk.content) | ||
elif isinstance(message_chunk, ChatMessageChunk): | ||
return ChatMessage(role=message_chunk.role, content=message_chunk.content) | ||
elif isinstance(message_chunk, ToolMessageChunk): | ||
return ToolMessage( | ||
content=message_chunk.content, | ||
tool_call_id=message_chunk.tool_call_id, | ||
name=message_chunk.name, | ||
) | ||
else: | ||
raise TypeError(f"Got unknown type {message_chunk}") | ||
|
||
|
@@ -136,8 +181,17 @@ def convert_message_to_dict(message: BaseMessage) -> dict: | |
message_dict = {"role": "user", "content": message.content} | ||
elif isinstance(message, AIMessage): | ||
message_dict = {"role": "assistant", "content": message.content} | ||
if "tool_calls" in message.additional_kwargs: | ||
message_dict["tool_calls"] = message.additional_kwargs["tool_calls"] | ||
elif isinstance(message, SystemMessage): | ||
message_dict = {"role": "system", "content": message.content} | ||
elif isinstance(message, ToolMessage): | ||
message_dict = { | ||
"role": "tool", | ||
"tool_call_id": message.tool_call_id, | ||
"content": message.content, | ||
"name": message.name, | ||
} | ||
else: | ||
raise TypeError(f"Got unknown type {message}") | ||
return message_dict | ||
|
@@ -373,9 +427,30 @@ def _stream( | |
params: Dict[str, Any] = self._invocation_params( | ||
messages=messages, stop=stop, stream=True, **kwargs | ||
) | ||
prev_msg_content = "" | ||
|
||
for stream_resp, is_last_chunk in generate_with_last_element_mark( | ||
self.stream_completion_with_retry(**params) | ||
): | ||
choice = stream_resp["output"]["choices"][0] | ||
message = choice["message"] | ||
if ( | ||
choice["finish_reason"] == "null" | ||
and message["content"] == "" | ||
and "tool_calls" not in message | ||
): | ||
continue | ||
|
||
# If it's a tool call response, wait until it's finished | ||
if "tool_calls" in message and choice["finish_reason"] == "null": | ||
continue | ||
|
||
# If we are streaming without `incremental_output = True`, | ||
# we need to chop off the previous message content | ||
if not params.get("incremental_output", False): | ||
message["content"] = message["content"].replace(prev_msg_content, "") | ||
prev_msg_content += message["content"] | ||
|
||
chunk = ChatGenerationChunk( | ||
**self._chat_generation_from_qwen_resp( | ||
stream_resp, is_chunk=True, is_last_chunk=is_last_chunk | ||
|
@@ -413,14 +488,13 @@ def _invocation_params( | |
params = {**self._default_params, **kwargs} | ||
if stop is not None: | ||
params["stop"] = stop | ||
if params.get("stream"): | ||
# According to the Tongyi official docs, | ||
# `incremental_output` with `tools` is not supported yet | ||
if params.get("stream") and not params.get("tools"): | ||
params["incremental_output"] = True | ||
|
||
message_dicts = [convert_message_to_dict(m) for m in messages] | ||
|
||
# According to the docs, the last message should be a `user` message | ||
if message_dicts[-1]["role"] != "user": | ||
raise ValueError("Last message should be user message.") | ||
# And the `system` message should be the first message if present | ||
system_message_indices = [ | ||
i for i, m in enumerate(message_dicts) if m["role"] == "system" | ||
|
@@ -470,3 +544,22 @@ def _chunk_to_generation(chunk: ChatGenerationChunk) -> ChatGeneration: | |
message=convert_message_chunk_to_message(chunk.message), | ||
generation_info=chunk.generation_info, | ||
) | ||
|
||
def bind_tools( | ||
self, | ||
tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]], | ||
**kwargs: Any, | ||
) -> Runnable[LanguageModelInput, BaseMessage]: | ||
"""Bind tool-like objects to this chat model. | ||
|
||
Args: | ||
tools: A list of tool definitions to bind to this chat model. | ||
Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic | ||
models, callables, and BaseTools will be automatically converted to | ||
their schema dictionary representation. | ||
**kwargs: Any additional parameters to pass to the | ||
:class:`~langchain.runnable.Runnable` constructor. | ||
""" | ||
|
||
formatted_tools = [convert_to_openai_tool(tool) for tool in tools] | ||
return super().bind(tools=formatted_tools, **kwargs) |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
are these tool calls fully formed in a streaming context?
if not, you can specify `tool_call_chunks` on AIMessageChunk instead, with `args` as a (partial JSON) string. See example here:
langchain/libs/partners/openai/langchain_openai/chat_models/base.py
Line 264 in c010ec8
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Thanks a lot, I have made some updates about this.