Skip to content

Commit

Permalink
truncation tests
Browse files Browse the repository at this point in the history
  • Loading branch information
JuliaS92 committed Jan 15, 2025
1 parent b15bd72 commit 2a5312f
Show file tree
Hide file tree
Showing 2 changed files with 48 additions and 2 deletions.
2 changes: 1 addition & 1 deletion alphastats/llm/llm_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@ def _truncate_conversation_history(
warnings.warn(
f"Removing corresponsing tool output as well.\nRemoved message:{removed_toolmessage[MessageKeys.ROLE]}: {removed_toolmessage[MessageKeys.CONTENT][0:30]}..."
)
if len(self._messages) == 0:
if len(self._messages) == oldest_not_pinned:
raise ValueError(
"Truncating conversation history failed, as the artifact from the last call exceeds the token limit. Please increase the token limit and reset the LLM analysis."
)
Expand Down
48 changes: 47 additions & 1 deletion tests/llm/test_llm_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ def test_append_message_with_tool_calls(llm_integration):
(10, 20, 100, 5), # Should truncate to 5 messages
],
)
def test_truncate_conversation_history(
def test_truncate_conversation_history_success(
llm_integration, num_messages, message_length, max_tokens, expected_messages
):
"""Test conversation history truncation with different scenarios"""
Expand All @@ -197,6 +197,52 @@ def test_truncate_conversation_history(
assert llm_integration._messages[0]["role"] == "system"


def test_truncate_conversation_history_pinned_too_large(llm_integration):
    """Truncation must fail when the pinned messages alone exceed the token limit."""
    long_text = ("Test " * 100).strip()
    llm_integration._max_tokens = 200
    # Seed the history with one pinned and one unpinned message.
    llm_integration._append_message("user", long_text, pin_message=True)
    llm_integration._append_message("user", long_text, pin_message=False)
    # Appending a further pinned message forces truncation, which cannot free
    # enough tokens because only pinned messages would remain.
    with pytest.raises(ValueError, match=r".*all remaining messages are pinned.*"):
        llm_integration._append_message("assistant", long_text, pin_message=True)


def test_truncate_conversation_history_tool_output_popped(llm_integration):
    """A tool message orphaned by truncation is removed along with its assistant message."""
    text = ("Test " * 50).strip()
    llm_integration._max_tokens = 120
    # Removing only the assistant message would already satisfy the token
    # budget, but the now-orphaned tool output must be dropped as well.
    llm_integration._append_message("assistant", text)
    llm_integration._append_message("tool", text)
    # NOTE: "corresponsing" deliberately mirrors the typo in the warning
    # emitted by _truncate_conversation_history.
    with pytest.warns(
        UserWarning, match=r".*Truncating conversation history.*"
    ), pytest.warns(UserWarning, match=r".*Removing corresponsing tool.*"):
        llm_integration._append_message("user", text)

    # Only the system prompt and the freshly appended user message survive.
    assert len(llm_integration._messages) == 2
    assert llm_integration._messages[0]["role"] == "system"
    assert llm_integration._messages[1]["role"] == "user"


def test_truncate_conversation_history_last_tool_output_error(llm_integration):
    """Truncation raises when the tool output of the last call alone exceeds the limit."""
    text = ("Test " * 50).strip()
    llm_integration._max_tokens = 100
    llm_integration._append_message("assistant", text)
    # Appending the tool output triggers truncation; even after the assistant
    # message is removed, the tool artifact by itself still exceeds the
    # budget, which must surface as an error rather than a silent drop.
    with pytest.raises(ValueError, match=r".*last call exceeds the token limit.*"):
        llm_integration._append_message("tool", text)


def test_truncate_conversation_history_single_large_message(llm_integration):
    """Truncation raises when the single remaining message already exceeds the limit."""
    llm_integration._max_tokens = 1
    # Fixed regex: the original ended in "limit*", where "*" quantified the
    # literal "t" (so it also matched "...limi"); ".*" for trailing text is
    # what was intended.
    with pytest.raises(
        ValueError, match=r".*only remaining message exceeds the token limit.*"
    ):
        llm_integration._truncate_conversation_history()


def test_estimate_tokens_gpt(llm_integration):
"""Test token estimation for a given message"""
message_content = "Test message"
Expand Down

0 comments on commit 2a5312f

Please sign in to comment.