
chore: use text instead of content for ChatMessage in Llama.cpp, Langfuse and Mistral #1238

Merged · 1 commit · merged on Dec 10, 2024
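For context, here is a minimal sketch (not part of the diff) of the accessor these integrations migrate to, assuming a haystack-ai release where `ChatMessage.text` is available:

```python
# Hedged sketch, not from the PR: reply bodies are now read via the `.text`
# property instead of the older `.content` attribute.
from haystack.dataclasses import ChatMessage

reply = ChatMessage.from_assistant("Berlin is the capital of Germany.")
print(reply.text)  # "Berlin is the capital of Germany."
```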
2 changes: 1 addition & 1 deletion integrations/langfuse/tests/test_tracing.py
@@ -49,7 +49,7 @@ def test_tracing_integration(llm_class, env_var, expected_trace):
"tracer": {"invocation_context": {"user_id": "user_42"}},
}
)
assert "Berlin" in response["llm"]["replies"][0].content
assert "Berlin" in response["llm"]["replies"][0].text
assert response["tracer"]["trace_url"]

trace_url = response["tracer"]["trace_url"]
@@ -17,7 +17,7 @@ def _convert_message_to_llamacpp_format(message: ChatMessage) -> Dict[str, str]:
- `content`
- `name` (optional)
"""
formatted_msg = {"role": message.role.value, "content": message.content}
formatted_msg = {"role": message.role.value, "content": message.text}
if message.name:
formatted_msg["name"] = message.name

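A hedged sketch of what the updated Llama.cpp converter produces (the role/content field names come from the diff above; the example message is assumed):

```python
# Sketch only: the ChatMessage is flattened into the plain role/content dict
# that llama-cpp-python's chat API expects, reading the body via `.text`.
from haystack.dataclasses import ChatMessage

message = ChatMessage.from_system("You are a helpful assistant.")
formatted_msg = {"role": message.role.value, "content": message.text}
print(formatted_msg)  # {'role': 'system', 'content': 'You are a helpful assistant.'}
```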
30 changes: 15 additions & 15 deletions integrations/llama_cpp/tests/test_chat_generator.py
@@ -163,7 +163,7 @@ def test_run_with_valid_message(self, generator_mock):
assert isinstance(result["replies"], list)
assert len(result["replies"]) == 1
assert isinstance(result["replies"][0], ChatMessage)
assert result["replies"][0].content == "Generated text"
assert result["replies"][0].text == "Generated text"
assert result["replies"][0].role == ChatRole.ASSISTANT

def test_run_with_generation_kwargs(self, generator_mock):
@@ -183,7 +183,7 @@ def test_run_with_generation_kwargs(self, generator_mock):
mock_model.create_chat_completion.return_value = mock_output
generation_kwargs = {"max_tokens": 128}
result = generator.run([ChatMessage.from_system("Write a 200 word paragraph.")], generation_kwargs)
assert result["replies"][0].content == "Generated text"
assert result["replies"][0].text == "Generated text"
assert result["replies"][0].meta["finish_reason"] == "length"

@pytest.mark.integration
@@ -206,7 +206,7 @@ def test_run(self, generator):
assert "replies" in result
assert isinstance(result["replies"], list)
assert len(result["replies"]) > 0
-assert any(answer.lower() in reply.content.lower() for reply in result["replies"])
+assert any(answer.lower() in reply.text.lower() for reply in result["replies"])

@pytest.mark.integration
def test_run_rag_pipeline(self, generator):
@@ -270,7 +270,7 @@ def test_run_rag_pipeline(self, generator):

replies = result["llm"]["replies"]
assert len(replies) > 0
assert any("bioluminescent waves" in reply.content for reply in replies)
assert any("bioluminescent waves" in reply.text.lower() for reply in replies)
assert all(reply.role == ChatRole.ASSISTANT for reply in replies)

@pytest.mark.integration
@@ -308,15 +308,15 @@ def test_json_constraining(self, generator):
assert len(result["replies"]) > 0
assert all(reply.role == ChatRole.ASSISTANT for reply in result["replies"])
for reply in result["replies"]:
-assert json.loads(reply.content)
-assert isinstance(json.loads(reply.content), dict)
-assert "people" in json.loads(reply.content)
-assert isinstance(json.loads(reply.content)["people"], list)
-assert all(isinstance(person, dict) for person in json.loads(reply.content)["people"])
-assert all("name" in person for person in json.loads(reply.content)["people"])
-assert all("age" in person for person in json.loads(reply.content)["people"])
-assert all(isinstance(person["name"], str) for person in json.loads(reply.content)["people"])
-assert all(isinstance(person["age"], int) for person in json.loads(reply.content)["people"])
+assert json.loads(reply.text)
+assert isinstance(json.loads(reply.text), dict)
+assert "people" in json.loads(reply.text)
+assert isinstance(json.loads(reply.text)["people"], list)
+assert all(isinstance(person, dict) for person in json.loads(reply.text)["people"])
+assert all("name" in person for person in json.loads(reply.text)["people"])
+assert all("age" in person for person in json.loads(reply.text)["people"])
+assert all(isinstance(person["name"], str) for person in json.loads(reply.text)["people"])
+assert all(isinstance(person["age"], int) for person in json.loads(reply.text)["people"])


class TestLlamaCppChatGeneratorFunctionary:
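A side note, not part of this PR: the JSON shape those assertions exercise can be summarized in a parse-once sketch (key names taken from the test above):

```python
# Hedged sketch: equivalent structural check for one JSON-constrained reply,
# parsing reply.text a single time instead of once per assertion.
import json

def check_people_payload(reply_text: str) -> None:
    data = json.loads(reply_text)      # the reply must be valid JSON
    assert isinstance(data, dict)
    people = data["people"]            # top-level "people" list
    assert isinstance(people, list)
    for person in people:
        assert isinstance(person["name"], str)
        assert isinstance(person["age"], int)
```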
@@ -431,8 +431,8 @@ def test_function_call_and_execute(self, generator):
second_response = generator.run(messages=messages)
assert "replies" in second_response
assert len(second_response["replies"]) > 0
assert any("San Francisco" in reply.content for reply in second_response["replies"])
assert any("72" in reply.content for reply in second_response["replies"])
assert any("San Francisco" in reply.text for reply in second_response["replies"])
assert any("72" in reply.text for reply in second_response["replies"])


class TestLlamaCppChatGeneratorChatML:
4 changes: 2 additions & 2 deletions integrations/mistral/tests/test_mistral_chat_generator.py
@@ -214,7 +214,7 @@ def test_live_run(self):
results = component.run(chat_messages)
assert len(results["replies"]) == 1
message: ChatMessage = results["replies"][0]
assert "Paris" in message.content
assert "Paris" in message.text
assert "mistral-tiny" in message.meta["model"]
assert message.meta["finish_reason"] == "stop"

@@ -249,7 +249,7 @@ def __call__(self, chunk: StreamingChunk) -> None:

assert len(results["replies"]) == 1
message: ChatMessage = results["replies"][0]
assert "Paris" in message.content
assert "Paris" in message.text

assert "mistral-tiny" in message.meta["model"]
assert message.meta["finish_reason"] == "stop"
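To close, a hedged sketch (not from the PR) of the reply fields the Mistral tests inspect; the model name and prompt are placeholders and a MISTRAL_API_KEY is assumed to be set in the environment:

```python
# Sketch only: a chat generator reply exposes its body via `.text` and
# provenance details via `.meta`.
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.generators.mistral import MistralChatGenerator

component = MistralChatGenerator(model="mistral-tiny")
chat_messages = [ChatMessage.from_user("What is the capital of France?")]

results = component.run(chat_messages)
message = results["replies"][0]
assert "Paris" in message.text                  # reply body via .text
assert "mistral-tiny" in message.meta["model"]  # model recorded in .meta
assert message.meta["finish_reason"] == "stop"
```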