Skip to content

Commit

Permalink
Update lm.py
Browse files · Browse the repository at this point in the history
  • Loading branch information
bahtman authored Nov 26, 2024
1 parent 74b19c8 commit 3f0863b
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion dspy/clients/lm.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -84,7 +84,8 @@ def __call__(self, prompt=None, messages=None, **kwargs):
cache = kwargs.pop("cache", self.cache)
messages = messages or [{"role": "user", "content": prompt}]
kwargs = {**self.kwargs, **kwargs}

callable_kwargs = {k: v for k, v in kwargs.items() if isinstance(v, Callable)}
kwargs = {k: v for k, v in kwargs.items() if not isinstance(v, Callable)}
# Make the request and handle LRU & disk caching.
if self.model_type == "chat":
completion = cached_litellm_completion if cache else litellm_completion
Expand All @@ -94,6 +95,7 @@ def __call__(self, prompt=None, messages=None, **kwargs):
response = completion(
request=ujson.dumps(dict(model=self.model, messages=messages, **kwargs)),
num_retries=self.num_retries,
**callable_kwargs,
)
outputs = [c.message.content if hasattr(c, "message") else c["text"] for c in response["choices"]]

Expand Down

0 comments on commit 3f0863b

Please sign in to comment.