Skip to content

Commit

Permalink
Merge branch 'master' into feature/add-logging
Browse files Browse the repository at this point in the history
  • Loading branch information
CaelumF authored Jul 22, 2024
2 parents 2330c6e + a55aeb6 commit 9b21bae
Show file tree
Hide file tree
Showing 13 changed files with 216 additions and 30 deletions.
9 changes: 7 additions & 2 deletions camel/models/anthropic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for Anthropic backend.
Expand All @@ -48,12 +49,16 @@ def __init__(
Anthropic service. (default: :obj:`None`)
url (Optional[str]): The url to the Anthropic service. (default:
:obj:`None`)
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `AnthropicTokenCounter` will
be used.
"""
super().__init__(model_type, model_config_dict, api_key, url)
super().__init__(
model_type, model_config_dict, api_key, url, token_counter
)
self._api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")
self._url = url or os.environ.get("ANTHROPIC_API_BASE_URL")
self.client = Anthropic(api_key=self._api_key, base_url=self._url)
self._token_counter: Optional[BaseTokenCounter] = None

def _convert_response_from_anthropic_to_openai(self, response):
# openai ^1.0.0 format, reference openai/types/chat/chat_completion.py
Expand Down
6 changes: 5 additions & 1 deletion camel/models/base_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for the model backend.
Expand All @@ -41,13 +42,16 @@ def __init__(
api_key (Optional[str]): The API key for authenticating with the
model service.
url (Optional[str]): The url to the model service.
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `OpenAITokenCounter` will
be used.
"""
self.model_type = model_type

self.model_config_dict = model_config_dict
self._api_key = api_key
self._url = url
self.check_model_config()
self._token_counter = token_counter

@property
@abstractmethod
Expand Down
16 changes: 14 additions & 2 deletions camel/models/gemini_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for Gemini backend.
Expand All @@ -54,17 +55,22 @@ def __init__(
api_key (Optional[str]): The API key for authenticating with the
gemini service. (default: :obj:`None`)
url (Optional[str]): The url to the gemini service.
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `GeminiTokenCounter` will be
used.
"""
import os

import google.generativeai as genai
from google.generativeai.types.generation_types import GenerationConfig

super().__init__(model_type, model_config_dict, api_key, url)
super().__init__(
model_type, model_config_dict, api_key, url, token_counter
)
self._api_key = api_key or os.environ.get("GOOGLE_API_KEY")
genai.configure(api_key=self._api_key)
self._client = genai.GenerativeModel(self.model_type.value)
self._token_counter: Optional[BaseTokenCounter] = None

keys = list(self.model_config_dict.keys())
generation_config_dict = {
k: self.model_config_dict.pop(k)
Expand All @@ -78,6 +84,12 @@ def __init__(

@property
def token_counter(self) -> BaseTokenCounter:
    r"""Return the token counter for this backend, creating it on first use.

    A counter supplied at construction time is reused; otherwise a
    ``GeminiTokenCounter`` matching this backend's model type is built
    and cached for subsequent accesses.

    Returns:
        BaseTokenCounter: The token counter following the model's
            tokenization style.
    """
    counter = self._token_counter
    if not counter:
        counter = GeminiTokenCounter(self.model_type)
        self._token_counter = counter
    return counter
Expand Down
14 changes: 10 additions & 4 deletions camel/models/litellm_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from camel.configs import LITELLM_API_PARAMS
from camel.messages import OpenAIMessage
from camel.types import ChatCompletion
from camel.utils import LiteLLMTokenCounter
from camel.utils import BaseTokenCounter, LiteLLMTokenCounter


class LiteLLMModel:
Expand All @@ -30,6 +30,7 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for LiteLLM backend.
Expand All @@ -42,11 +43,14 @@ def __init__(
model service. (default: :obj:`None`)
url (Optional[str]): The url to the model service. (default:
:obj:`None`)
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `LiteLLMTokenCounter` will
be used.
"""
self.model_type = model_type
self.model_config_dict = model_config_dict
self._client = None
self._token_counter: Optional[LiteLLMTokenCounter] = None
self._token_counter = token_counter
self.check_model_config()
self._url = url
self._api_key = api_key
Expand Down Expand Up @@ -98,8 +102,10 @@ def token_counter(self) -> LiteLLMTokenCounter:
tokenization style.
"""
if not self._token_counter:
self._token_counter = LiteLLMTokenCounter(self.model_type)
return self._token_counter
self._token_counter = LiteLLMTokenCounter( # type: ignore[assignment]
self.model_type
)
return self._token_counter # type: ignore[return-value]

def run(
self,
Expand Down
22 changes: 18 additions & 4 deletions camel/models/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from camel.models.vllm_model import VLLMModel
from camel.models.zhipuai_model import ZhipuAIModel
from camel.types import ModelPlatformType, ModelType
from camel.utils import BaseTokenCounter


class ModelFactory:
Expand All @@ -38,6 +39,7 @@ def create(
model_platform: ModelPlatformType,
model_type: Union[ModelType, str],
model_config_dict: Dict,
token_counter: Optional[BaseTokenCounter] = None,
api_key: Optional[str] = None,
url: Optional[str] = None,
) -> BaseModelBackend:
Expand All @@ -50,6 +52,10 @@ def create(
created can be a `str` for open source platforms.
model_config_dict (Dict): A dictionary that will be fed into
the backend constructor.
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, OpenAITokenCounter(ModelType.
GPT_3_5_TURBO) will be used if the model platform didn't
provide official token counter.
api_key (Optional[str]): The API key for authenticating with the
model service.
url (Optional[str]): The url to the model service.
Expand All @@ -64,7 +70,9 @@ def create(
if isinstance(model_type, ModelType):
if model_platform.is_open_source and model_type.is_open_source:
model_class = OpenSourceModel
return model_class(model_type, model_config_dict, url)
return model_class(
model_type, model_config_dict, url, token_counter
)
if model_platform.is_openai and model_type.is_openai:
model_class = OpenAIModel
elif model_platform.is_anthropic and model_type.is_anthropic:
Expand All @@ -83,10 +91,14 @@ def create(
elif isinstance(model_type, str):
if model_platform.is_ollama:
model_class = OllamaModel
return model_class(model_type, model_config_dict, url)
return model_class(
model_type, model_config_dict, url, token_counter
)
elif model_platform.is_vllm:
model_class = VLLMModel
return model_class(model_type, model_config_dict, url, api_key)
return model_class(
model_type, model_config_dict, url, api_key, token_counter
)
elif model_platform.is_litellm:
model_class = LiteLLMModel
else:
Expand All @@ -96,4 +108,6 @@ def create(
)
else:
raise ValueError(f"Invalid model type `{model_type}` provided.")
return model_class(model_type, model_config_dict, api_key, url)
return model_class(
model_type, model_config_dict, api_key, url, token_counter
)
7 changes: 5 additions & 2 deletions camel/models/ollama_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ def __init__(
model_type: str,
model_config_dict: Dict[str, Any],
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for Ollama backend with OpenAI compatibility.
Expand All @@ -40,6 +41,9 @@ def __init__(
be fed into openai.ChatCompletion.create().
url (Optional[str]): The url to the model service. (default:
:obj:`None`)
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `OpenAITokenCounter(ModelType.
GPT_3_5_TURBO)` will be used.
"""
self.model_type = model_type
self.model_config_dict = model_config_dict
Expand All @@ -50,7 +54,7 @@ def __init__(
base_url=url,
api_key="ollama", # required but ignored
)
self._token_counter: Optional[BaseTokenCounter] = None
self._token_counter = token_counter
self.check_model_config()

@property
Expand All @@ -61,7 +65,6 @@ def token_counter(self) -> BaseTokenCounter:
BaseTokenCounter: The token counter following the model's
tokenization style.
"""
# NOTE: Use OpenAITokenCounter temporarily
if not self._token_counter:
self._token_counter = OpenAITokenCounter(ModelType.GPT_3_5_TURBO)
return self._token_counter
Expand Down
14 changes: 11 additions & 3 deletions camel/models/open_source_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,10 @@
from camel.messages import OpenAIMessage
from camel.models import BaseModelBackend
from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
from camel.utils import BaseTokenCounter, OpenSourceTokenCounter
from camel.utils import (
BaseTokenCounter,
OpenSourceTokenCounter,
)


class OpenSourceModel(BaseModelBackend):
Expand All @@ -33,6 +36,7 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for model backends of Open-source models.
Expand All @@ -43,9 +47,13 @@ def __init__(
api_key (Optional[str]): The API key for authenticating with the
model service. (ignored for open-source models)
url (Optional[str]): The url to the model service.
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `OpenSourceTokenCounter` will
be used.
"""
super().__init__(model_type, model_config_dict, api_key, url)
self._token_counter: Optional[BaseTokenCounter] = None
super().__init__(
model_type, model_config_dict, api_key, url, token_counter
)

# Check whether the input model type is open-source
if not model_type.is_open_source:
Expand Down
9 changes: 7 additions & 2 deletions camel/models/openai_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for OpenAI backend.
Expand All @@ -48,8 +49,13 @@ def __init__(
OpenAI service. (default: :obj:`None`)
url (Optional[str]): The url to the OpenAI service. (default:
:obj:`None`)
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `OpenAITokenCounter` will
be used.
"""
super().__init__(model_type, model_config_dict, api_key, url)
super().__init__(
model_type, model_config_dict, api_key, url, token_counter
)
self._url = url or os.environ.get("OPENAI_API_BASE_URL")
self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
self._client = OpenAI(
Expand All @@ -58,7 +64,6 @@ def __init__(
base_url=self._url,
api_key=self._api_key,
)
self._token_counter: Optional[BaseTokenCounter] = None

@property
def token_counter(self) -> BaseTokenCounter:
Expand Down
8 changes: 4 additions & 4 deletions camel/models/stub_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,12 @@ def __init__(
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""All arguments are unused for the dummy model."""
super().__init__(model_type, model_config_dict, api_key, url)
self._token_counter: Optional[BaseTokenCounter] = None
self._api_key = api_key
self._url = url
super().__init__(
model_type, model_config_dict, api_key, url, token_counter
)

@property
def token_counter(self) -> BaseTokenCounter:
Expand Down
7 changes: 5 additions & 2 deletions camel/models/vllm_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def __init__(
model_config_dict: Dict[str, Any],
url: Optional[str] = None,
api_key: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
) -> None:
r"""Constructor for vLLM backend with OpenAI compatibility.
Expand All @@ -44,6 +45,9 @@ def __init__(
:obj:`None`)
api_key (Optional[str]): The API key for authenticating with the
model service.
token_counter (Optional[BaseTokenCounter]): Token counter to use
for the model. If not provided, `OpenAITokenCounter(ModelType.
GPT_3_5_TURBO)` will be used.
"""
self.model_type = model_type
self.model_config_dict = model_config_dict
Expand All @@ -54,7 +58,7 @@ def __init__(
base_url=url,
api_key=api_key,
)
self._token_counter: Optional[BaseTokenCounter] = None
self._token_counter = token_counter
self.check_model_config()

@property
Expand All @@ -65,7 +69,6 @@ def token_counter(self) -> BaseTokenCounter:
BaseTokenCounter: The token counter following the model's
tokenization style.
"""
# NOTE: Use OpenAITokenCounter temporarily
if not self._token_counter:
self._token_counter = OpenAITokenCounter(ModelType.GPT_3_5_TURBO)
return self._token_counter
Expand Down
Loading

0 comments on commit 9b21bae

Please sign in to comment.