chore: Update create_chat_openai_model to include a seed parameter
The `create_chat_openai_model` function in the `openai.py` module has been updated with a new `seed` parameter, which sets a random seed when a `ChatOpenAI` instance is created from a model name or the default model. This improves control over, and reproducibility of, the generated chat responses.
Showing 3 changed files with 38 additions and 5 deletions.
@@ -1,22 +1,26 @@
 from functools import lru_cache
 from langchain_openai import ChatOpenAI
 from ..const import DEFAULT_MODEL
 
 
 @lru_cache(maxsize=None)
 def create_chat_openai_model(
     llm: ChatOpenAI | str | None = None,
+    seed: int | None = None,
 ) -> ChatOpenAI:
     """Create a ChatOpenAI instance.
 
     Args:
         llm (ChatOpenAI | str | None, optional): ChatOpenAI instance or model name. Defaults to None.
+        seed (int, optional): Random seed. Defaults to None.
 
     Returns:
         ChatOpenAI: ChatOpenAI instance
 
+    Note:
+        seed is used only when llm is a str or None.
     """  # noqa
     if isinstance(llm, str):
-        return ChatOpenAI(model=llm)
+        return ChatOpenAI(model=llm, seed=seed)
     else:
-        return llm or ChatOpenAI(model=DEFAULT_MODEL)
+        return llm or ChatOpenAI(model=DEFAULT_MODEL, seed=seed)
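
For context, here is a minimal usage sketch of the updated helper. The import path and model name below are illustrative assumptions, not part of this commit; only the `create_chat_openai_model` signature comes from the diff above.

```python
# Minimal usage sketch. Assumptions (not part of the commit): the helper is
# importable from `your_project.openai`, OPENAI_API_KEY is set in the environment,
# and "gpt-4o-mini" is just a placeholder model name.
from your_project.openai import create_chat_openai_model  # hypothetical import path

# Passing a model name (str) or None forwards the seed to ChatOpenAI, which helps
# make sampled responses more reproducible across runs.
llm = create_chat_openai_model("gpt-4o-mini", seed=42)

# Because the helper is wrapped in @lru_cache, calling it again with identical
# (llm, seed) arguments returns the same cached ChatOpenAI instance.
assert create_chat_openai_model("gpt-4o-mini", seed=42) is llm

# Per the docstring's Note, the seed is ignored when an existing ChatOpenAI
# instance is passed in; that instance is returned unchanged.
```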