Commit 79b8adb
add modelscope endpoint
Yunnglin committed Dec 26, 2024
1 parent 5991b45 commit 79b8adb
Showing 6 changed files with 503 additions and 0 deletions.
5 changes: 5 additions & 0 deletions libs/community/langchain_community/chat_models/__init__.py
@@ -122,6 +122,9 @@
from langchain_community.chat_models.mlx import (
    ChatMLX,
)
from langchain_community.chat_models.modelscope_endpoint import (
    ModelscopeChatEndpoint,
)
from langchain_community.chat_models.moonshot import (
    MoonshotChat,
)
@@ -251,6 +254,7 @@
"JinaChat",
"LlamaEdgeChatService",
"MiniMaxChat",
"ModelscopeChatEndpoint",
"MoonshotChat",
"PaiEasChatEndpoint",
"PromptLayerChatOpenAI",
@@ -316,6 +320,7 @@
"JinaChat": "langchain_community.chat_models.jinachat",
"LlamaEdgeChatService": "langchain_community.chat_models.llama_edge",
"MiniMaxChat": "langchain_community.chat_models.minimax",
"ModelscopeChatEndpoint": "langchain_community.chat_models.modelscope_endpoint",
"MoonshotChat": "langchain_community.chat_models.moonshot",
"PaiEasChatEndpoint": "langchain_community.chat_models.pai_eas_endpoint",
"PromptLayerChatOpenAI": "langchain_community.chat_models.promptlayer_openai",
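The import block, the `__all__` entry, and the name-to-module mapping above feed the package's lazy-import machinery: a module-level `__getattr__` (PEP 562) imports the defining module only when the symbol is first accessed. A minimal sketch of that pattern, simplified from what the real `__init__.py` does (names here are illustrative):

import importlib
from typing import Any

# Maps exported class names to the modules that define them.
_module_lookup = {
    "ModelscopeChatEndpoint": "langchain_community.chat_models.modelscope_endpoint",
}


def __getattr__(name: str) -> Any:
    # Invoked only when `name` is not found by normal lookup, so the
    # heavy import is deferred until the symbol is first requested.
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")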
128 changes: 128 additions & 0 deletions libs/community/langchain_community/chat_models/modelscope_endpoint.py
@@ -0,0 +1,128 @@
"""Wrapper around modelscope chat endpoint models."""

from typing import Dict

from langchain_core.utils import (
convert_to_secret_str,
get_from_dict_or_env,
pre_init,
)

from langchain_community.chat_models import ChatOpenAI
from langchain_community.llms.modelscope_endpoint import (
MODELSCOPE_SERVICE_URL_BASE,
ModelscopeCommon,
)


class ModelscopeChatEndpoint(ModelscopeCommon, ChatOpenAI):  # type: ignore[misc, override]
"""Modelscope chat model inference api integration. To use, must have a modelscope account and a modelscope sdk token.
Refer to https://modelscope.cn/docs/model-service/API-Inference/intro for more details.
Setup:
Install ``openai`` and set environment variables ``MODELSCOPE_SDK_TOKEN``.
.. code-block:: bash
pip install openai
export MODELSCOPE_SDK_TOKEN="your-modelscope-sdk-token"
Key init args — completion params:
model: str
Name of Modelscope model to use. Refer to https://modelscope.cn/docs/model-service/API-Inference/intro for available models.
temperature: Optional[float]
Sampling temperature, defaults to 0.3.
max_tokens: Optional[int]
Max number of tokens to generate, defaults to 1024.
Key init args — client params:
modelscope_sdk_token: Optional[str]
Modelscope SDK Token. If not passed in will be read from env var MODELSCOPE_SDK_TOKEN.
api_base: Optional[str]
Base URL for API requests.
See full list of supported init args and their descriptions in the params section.
Instantiate:
.. code-block:: python
from langchain_community.chat_models import ModelscopeChatEndpoint
chat = ModelscopeChatEndpoint(
modelscope_sdk_token="your-modelscope-sdk-token",
model="Qwen/Qwen2.5-Coder-32B-Instruct",
temperature=0.5,
# api_base="...",
# other params...
)
Invoke:
.. code-block:: python
messages = [
("system", "你擅长编程"),
("human", "写一个快速排序的代码"),
]
chat.invoke(messages)
.. code-block:: python
AIMessage(
content='def quick_sort(arr): ...',
additional_kwargs={},
response_metadata={
'token_usage': {
'completion_tokens': 312,
'prompt_tokens': 27,
'total_tokens': 339
},
'model_name': 'Qwen/Qwen2.5-Coder-32B-Instruct',
'system_fingerprint': None,
'finish_reason': 'stop',
'logprobs': None
},
id='run-71c03f4e-6628-41d5-beb6-d2559ae68266-0'
)
Stream:
.. code-block:: python
for chunk in chat.stream(messages):
print(chunk)
""" # noqa: E501

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the environment is set up correctly."""
        # Resolve the token from init kwargs ("modelscope_sdk_token" or
        # "api_key") or from the MODELSCOPE_SDK_TOKEN env var.
        values["modelscope_sdk_token"] = convert_to_secret_str(
            get_from_dict_or_env(
                values,
                ["modelscope_sdk_token", "api_key"],
                "MODELSCOPE_SDK_TOKEN",
            )
        )

        try:
            import openai

        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )

        # Fall back to the public Modelscope endpoint when no base_url
        # is supplied.
        client_params = {
            "api_key": values["modelscope_sdk_token"].get_secret_value(),
            "base_url": values["base_url"]
            if "base_url" in values
            else MODELSCOPE_SERVICE_URL_BASE,
        }

        # Build OpenAI-compatible sync/async clients unless the caller
        # already injected them.
        if not values.get("client"):
            values["client"] = openai.OpenAI(**client_params).chat.completions
        if not values.get("async_client"):
            values["async_client"] = openai.AsyncOpenAI(
                **client_params
            ).chat.completions

        return values
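For reference, a minimal end-to-end usage sketch of the class above, assuming `openai` and `langchain-community` are installed, a valid `MODELSCOPE_SDK_TOKEN` is exported, and the Qwen model from the docstring example is still served:

# Minimal usage sketch; relies on MODELSCOPE_SDK_TOKEN being set in the
# environment, so no token is passed explicitly here.
from langchain_community.chat_models import ModelscopeChatEndpoint

chat = ModelscopeChatEndpoint(
    model="Qwen/Qwen2.5-Coder-32B-Instruct",
    temperature=0.5,
)

messages = [
    ("system", "You are good at programming."),
    ("human", "Write a quicksort implementation."),
]

# Single-shot call.
print(chat.invoke(messages).content)

# Token-by-token streaming.
for chunk in chat.stream(messages):
    print(chunk.content, end="", flush=True)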
10 changes: 10 additions & 0 deletions libs/community/langchain_community/llms/__init__.py
@@ -368,6 +368,12 @@ def _import_modal() -> Type[BaseLLM]:
    return Modal


def _import_modelscope_endpoint() -> Type[BaseLLM]:
    from langchain_community.llms.modelscope_endpoint import ModelscopeEndpoint

    return ModelscopeEndpoint


def _import_mosaicml() -> Type[BaseLLM]:
    from langchain_community.llms.mosaicml import MosaicML

@@ -785,6 +791,8 @@ def __getattr__(name: str) -> Any:
        return _import_mlx_pipeline()
    elif name == "Modal":
        return _import_modal()
    elif name == "ModelscopeEndpoint":
        return _import_modelscope_endpoint()
    elif name == "MosaicML":
        return _import_mosaicml()
    elif name == "NLPCloud":
@@ -947,6 +955,7 @@ def __getattr__(name: str) -> Any:
"MlflowAIGateway",
"MLXPipeline",
"Modal",
"ModelscopeEndpoint",
"MosaicML",
"NIBittensorLLM",
"NLPCloud",
@@ -1052,6 +1061,7 @@ def get_type_to_cls_dict() -> Dict[str, Callable[[], Type[BaseLLM]]]:
"mlflow-ai-gateway": _import_mlflow_ai_gateway,
"mlx_pipeline": _import_mlx_pipeline,
"modal": _import_modal,
"modelscope_endpoint": _import_modelscope_endpoint,
"mosaic": _import_mosaicml,
"nebula": _import_symblai_nebula,
"nibittensor": _import_bittensor,
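The `get_type_to_cls_dict` entry maps the string key "modelscope_endpoint" to a zero-argument import callable, so the implementation module is imported only when the callable is invoked. A hypothetical caller sketch (the `model` kwarg is an assumption based on the docstring example above, not confirmed by this diff):

from langchain_community.llms import get_type_to_cls_dict

# Look up the import callable by key, then call it to import and
# return the ModelscopeEndpoint class.
llm_cls = get_type_to_cls_dict()["modelscope_endpoint"]()
llm = llm_cls(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # assumed kwarg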