diff --git a/libs/community/langchain_community/chat_models/zhipuai.py b/libs/community/langchain_community/chat_models/zhipuai.py
index 06f299e010e81..99b58697f1a01 100644
--- a/libs/community/langchain_community/chat_models/zhipuai.py
+++ b/libs/community/langchain_community/chat_models/zhipuai.py
@@ -591,13 +591,19 @@ def _stream(
                     if len(chunk["choices"]) == 0:
                         continue
                     choice = chunk["choices"][0]
+                    usage = chunk.get("usage", None)
+                    model_name = chunk.get("model", "")
                     chunk = _convert_delta_to_message_chunk(
                         choice["delta"], default_chunk_class
                     )
                     finish_reason = choice.get("finish_reason", None)
                     generation_info = (
-                        {"finish_reason": finish_reason}
+                        {
+                            "finish_reason": finish_reason,
+                            "token_usage": usage,
+                            "model_name": model_name,
+                        }
                         if finish_reason is not None
                         else None
                     )
@@ -678,13 +684,19 @@ async def _astream(
                     if len(chunk["choices"]) == 0:
                         continue
                     choice = chunk["choices"][0]
+                    usage = chunk.get("usage", None)
+                    model_name = chunk.get("model", "")
                     chunk = _convert_delta_to_message_chunk(
                         choice["delta"], default_chunk_class
                     )
                     finish_reason = choice.get("finish_reason", None)
                     generation_info = (
-                        {"finish_reason": finish_reason}
+                        {
+                            "finish_reason": finish_reason,
+                            "token_usage": usage,
+                            "model_name": model_name,
+                        }
                         if finish_reason is not None
                         else None
                     )
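
For reviewers, a minimal usage sketch (not part of the diff) of where the new fields surface. It calls the private _stream() directly purely to inspect the raw ChatGenerationChunk objects; the API key and model name are placeholders, and real application code would normally consume the model through stream() or callbacks instead.

    # Illustrative only: inspects ChatGenerationChunk.generation_info directly.
    from langchain_community.chat_models import ChatZhipuAI
    from langchain_core.messages import HumanMessage

    chat = ChatZhipuAI(api_key="your-api-key", model="glm-4")  # placeholder credentials

    for chunk in chat._stream([HumanMessage(content="Hello")]):
        # Only the final chunk (the one carrying finish_reason) has generation_info.
        if chunk.generation_info is not None:
            print(chunk.generation_info["finish_reason"])
            print(chunk.generation_info["token_usage"])  # new in this change
            print(chunk.generation_info["model_name"])   # new in this change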