customize Anthropic client via kwargs, also bumps default model version (#813)

* customize Anthropic client via kwargs

* bump default model
danthegoodman1 authored Dec 10, 2024
1 parent e7fa1ca commit af6f5ec
Showing 1 changed file with 11 additions and 3 deletions.
14 changes: 11 additions & 3 deletions src/pipecat/services/anthropic.py
@@ -75,7 +75,12 @@ def assistant(self) -> "AnthropicAssistantContextAggregator":
 
 
 class AnthropicLLMService(LLMService):
-    """This class implements inference with Anthropic's AI models"""
+    """
+    This class implements inference with Anthropic's AI models.
+
+    Can provide a custom client via the `client` kwarg, allowing you to
+    use `AsyncAnthropicBedrock` and `AsyncAnthropicVertex` clients
+    """
 
     class InputParams(BaseModel):
         enable_prompt_caching_beta: Optional[bool] = False
@@ -89,12 +94,15 @@ def __init__(
         self,
         *,
         api_key: str,
-        model: str = "claude-3-5-sonnet-20240620",
+        model: str = "claude-3-5-sonnet-20241022",
         params: InputParams = InputParams(),
+        client=None,
         **kwargs,
     ):
         super().__init__(**kwargs)
-        self._client = AsyncAnthropic(api_key=api_key)
+        self._client = client or AsyncAnthropic(
+            api_key=api_key
+        )  # if a client is provided, use it; otherwise create a default AsyncAnthropic client
         self.set_model_name(model)
         self._settings = {
             "max_tokens": params.max_tokens,

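For reference, here is a minimal usage sketch (not part of the commit) of the new `client` kwarg, assuming the `anthropic` SDK's `AsyncAnthropicBedrock` client with placeholder AWS region and Bedrock model id; `AsyncAnthropicVertex` can be passed the same way. Because the constructor only falls back to `AsyncAnthropic(api_key=api_key)` when `client` is None, the `api_key` argument is effectively unused when a custom client is supplied.

from anthropic import AsyncAnthropicBedrock  # may require the `anthropic[bedrock]` extra

from pipecat.services.anthropic import AnthropicLLMService

# Illustrative Bedrock client; the region is a placeholder, not something this commit prescribes.
bedrock_client = AsyncAnthropicBedrock(aws_region="us-east-1")

llm = AnthropicLLMService(
    api_key="",  # ignored when a custom client is passed in
    model="anthropic.claude-3-5-sonnet-20241022-v2:0",  # assumed Bedrock-style model id
    client=bedrock_client,
)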