fix lint
Yunnglin committed Dec 26, 2024
1 parent d224988 commit 95c9fbe
Showing 5 changed files with 8 additions and 159 deletions.
@@ -105,7 +105,7 @@ async def astream(self, request: Any) -> AsyncIterator[str]:
 class ModelScopeCommon(BaseModel):
     """Common parameters for Modelscope LLMs."""

-    client: Any
+    client: Any = Field(default=None)
     base_url: str = MODELSCOPE_SERVICE_URL_BASE
     modelscope_sdk_token: Optional[SecretStr] = Field(default=None, alias="api_key")
     model_name: str = Field(default="Qwen/Qwen2.5-Coder-32B-Instruct", alias="model")
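Why this one-line change satisfies the linter: under pydantic v2 (an assumption about the project's toolchain), a field annotated `Any` with no default is treated as required, so strict type checking rejects call sites that construct the model without passing `client`. A minimal sketch with a stand-in class, not the library's actual code:

from typing import Any, Optional

from pydantic import BaseModel, Field, SecretStr


class StandInCommon(BaseModel):
    """Stand-in for ModelScopeCommon; illustration only."""

    # Without an explicit default, pydantic v2 treats `client` as a
    # required constructor argument even though it is populated lazily
    # at runtime, and `StandInCommon()` fails validation.
    client: Any = Field(default=None)
    modelscope_sdk_token: Optional[SecretStr] = Field(default=None, alias="api_key")


StandInCommon()  # validates now that every field has a default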
151 changes: 0 additions & 151 deletions libs/community/langchain_community/llms/modelscope_pipeline.py

This file was deleted.
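The 151 deleted lines removed the pipeline-based wrapper; the API-backed endpoint classes exercised in the tests below remain. A minimal usage sketch of the surviving interface (the import path and the need for a ModelScope API token in the environment are assumptions):

from langchain_community.llms.modelscope_endpoint import ModelScopeEndpoint

llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore
print(llm.invoke("Say foo:"))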

@@ -6,15 +6,15 @@


 def test_modelscope_chat_call() -> None:
-    chat = ModelScopeChatEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")
+    chat = ModelScopeChatEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore
     response = chat.invoke([HumanMessage(content="Say foo:")])
     assert isinstance(response, BaseMessage)
     assert isinstance(response.content, str)


 def test_modelscope_chat_multiple_history() -> None:
     """Tests multiple history works."""
-    chat = ModelScopeChatEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")
+    chat = ModelScopeChatEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore

     response = chat.invoke(
         [
@@ -29,7 +29,7 @@ def test_modelscope_chat_multiple_history() -> None:

 def test_modelscope_chat_stream() -> None:
     """Test that stream works."""
-    chat = ModelScopeChatEndpoint(
+    chat = ModelScopeChatEndpoint(  # type: ignore
         model="Qwen/Qwen2.5-Coder-32B-Instruct",
         streaming=True,
     )
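A note on the `# type: ignore` markers added throughout the tests: `model` is a pydantic alias for the `model_name` field, and static type checkers commonly infer `__init__` signatures from field names rather than aliases, so these calls are flagged even though they work at runtime. A minimal sketch of the pattern with a stand-in class (the checker behavior described is an assumption about this repository's mypy setup):

from pydantic import BaseModel, ConfigDict, Field


class StandInEndpoint(BaseModel):
    """Stand-in for ModelScopeChatEndpoint; illustration only."""

    model_config = ConfigDict(populate_by_name=True)

    model_name: str = Field(default="Qwen/Qwen2.5-Coder-32B-Instruct", alias="model")


# Accepted at runtime via the alias, but a strict checker may only see
# `model_name` in the inferred signature, hence the suppression comments.
endpoint = StandInEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore[call-arg]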
@@ -7,14 +7,14 @@

 def test_modelscope_call() -> None:
     """Test valid call to Modelscope."""
-    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")
+    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore
     output = llm.invoke("Say foo:")
     assert isinstance(output, str)


 def test_modelscope_streaming() -> None:
     """Test streaming call to Modelscope."""
-    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")
+    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore
     generator = llm.stream("write a quick sort in python")
     stream_results_string = ""
     assert isinstance(generator, Iterator)
@@ -26,13 +26,13 @@ def test_modelscope_streaming() -> None:


 async def test_modelscope_call_async() -> None:
-    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")
+    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore
     output = await llm.ainvoke("write a quick sort in python")
     assert isinstance(output, str)


 async def test_modelscope_streaming_async() -> None:
-    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")
+    llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore
     generator = llm.astream("write a quick sort in python")
     stream_results_string = ""
     assert isinstance(generator, AsyncIterator)
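The hunks are collapsed before the chunks are consumed; for orientation, a sketch of how sync and async streams from these endpoints are typically drained, assuming the standard LangChain LLM interface in which each chunk is a `str`:

import asyncio

from langchain_community.llms.modelscope_endpoint import ModelScopeEndpoint

llm = ModelScopeEndpoint(model="Qwen/Qwen2.5-Coder-32B-Instruct")  # type: ignore

# Synchronous: stream() yields string chunks as they arrive.
text = ""
for chunk in llm.stream("write a quick sort in python"):
    text += chunk


async def drain() -> str:
    # Asynchronous: astream() is an async generator of string chunks.
    result = ""
    async for chunk in llm.astream("write a quick sort in python"):
        result += chunk
    return result


print(asyncio.run(drain()))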
Empty file.
