Skip to content

Commit

Permalink
JSON mode standard test
Browse files Browse the repository at this point in the history
  • Loading branch information
baskaryan committed Aug 16, 2024
1 parent b83f1eb commit 9d2d45a
Show file tree
Hide file tree
Showing 7 changed files with 69 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,7 @@ def chat_model_params(self) -> dict:
    @pytest.mark.xfail(reason="Not yet implemented.")
    def test_tool_message_histories_list_content(self, model: BaseChatModel) -> None:
        """Expected failure: delegates to the base standard test; this
        integration does not yet implement list-content tool message histories."""
        super().test_tool_message_histories_list_content(model)

    @property
    def supports_json_mode(self) -> bool:
        """Capability flag: this integration supports
        ``with_structured_output(..., method="json_mode")`` (enables
        the standard ``test_json_mode`` test)."""
        return True
4 changes: 4 additions & 0 deletions libs/partners/groq/tests/integration_tests/test_standard.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ def chat_model_class(self) -> Type[BaseChatModel]:
    def test_tool_message_histories_list_content(self, model: BaseChatModel) -> None:
        """Override that delegates to the base standard test for
        list-content tool message histories.

        NOTE(review): a decorator (likely ``xfail``) may precede this def
        outside the visible diff hunk — confirm against the full file.
        """
        super().test_tool_message_histories_list_content(model)

    @property
    def supports_json_mode(self) -> bool:
        """Capability flag: this integration supports
        ``with_structured_output(..., method="json_mode")``."""
        return True


class TestGroqLlama(BaseTestGroq):
@property
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,7 @@ def chat_model_class(self) -> Type[BaseChatModel]:
    @property
    def chat_model_params(self) -> dict:
        """Constructor kwargs for the model under test.

        ``temperature=0`` keeps outputs as deterministic as possible for
        the standard integration tests.
        """
        return {"model": "mistral-large-latest", "temperature": 0}

    @property
    def supports_json_mode(self) -> bool:
        """Capability flag: this integration supports
        ``with_structured_output(..., method="json_mode")``."""
        return True
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,11 @@ def chat_model_params(self) -> dict:
    @pytest.mark.xfail(reason="Not yet supported.")
    def test_usage_metadata_streaming(self, model: BaseChatModel) -> None:
        """Expected failure: delegates to the base standard test; usage
        metadata during streaming is not yet supported by this integration."""
        super().test_usage_metadata_streaming(model)

    @property
    def supports_image_inputs(self) -> bool:
        """Capability flag: this integration accepts image inputs in the
        standard tests."""
        return True

    @property
    def supports_json_mode(self) -> bool:
        """Capability flag: this integration supports
        ``with_structured_output(..., method="json_mode")``."""
        return True
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,7 @@ def chat_model_params(self) -> dict:
    @property
    def supports_image_inputs(self) -> bool:
        """Capability flag: this integration accepts image inputs in the
        standard tests."""
        return True

    @property
    def supports_json_mode(self) -> bool:
        """Capability flag: this integration supports
        ``with_structured_output(..., method="json_mode")``."""
        return True
Original file line number Diff line number Diff line change
Expand Up @@ -509,3 +509,44 @@ def test_tool_message_error_status(self, model: BaseChatModel) -> None:
]
result = model_with_tools.invoke(messages)
assert isinstance(result, AIMessage)

def test_json_mode(self, model: BaseChatModel) -> None:
if not self.supports_json_mode:
pytest.skip("Test requires json mode support.")

from pydantic import BaseModel as BaseModelProper
from pydantic import Field as FieldProper

class Joke(BaseModelProper):
"""Joke to tell user."""

setup: str = FieldProper(description="question to set up a joke")
punchline: str = FieldProper(description="answer to resolve the joke")

# Pydantic class
# Type ignoring since the interface only officially supports pydantic 1
# or pydantic.v1.BaseModel but not pydantic.BaseModel from pydantic 2.
# We'll need to do a pass updating the type signatures.
chat = model.with_structured_output(Joke, method="json_mode") # type: ignore[arg-type]
msg = (
"Tell me a joke about cats. Return the result as a JSON with 'setup' and "
"'punchline' keys. Return nothing other than JSON."
)
result = chat.invoke(msg)
assert isinstance(result, Joke)

for chunk in chat.stream(msg):
assert isinstance(chunk, Joke)

# Schema
chat = model.with_structured_output(
Joke.model_json_schema(), method="json_mode"
)
result = chat.invoke(msg)
assert isinstance(result, dict)
assert set(result.keys()) == {"setup", "punchline"}

for chunk in chat.stream(msg):
assert isinstance(chunk, dict)
assert isinstance(chunk, dict) # for mypy
assert set(chunk.keys()) == {"setup", "punchline"}
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,10 @@ def returns_usage_metadata(self) -> bool:
    def supports_anthropic_inputs(self) -> bool:
        """Default capability flag: Anthropic-style message inputs are
        not supported unless an integration overrides this to ``True``.

        NOTE(review): an ``@property`` decorator likely precedes this def
        outside the visible diff hunk — confirm against the full file.
        """
        return False

    @property
    def supports_json_mode(self) -> bool:
        """Default capability flag: JSON-mode structured output
        (``with_structured_output(..., method="json_mode")``) is disabled
        unless an integration overrides this to ``True``."""
        return False


class ChatModelUnitTests(ChatModelTests):
@property
Expand Down

0 comments on commit 9d2d45a

Please sign in to comment.