Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(ai): add error handling to python ai sdk #174

Merged
merged 7 commits into from
Jan 24, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
## 3.10.0 - 2025-01-24

1. Add `$ai_error` and `$ai_is_error` properties to LangChain callback handler, OpenAI, and Anthropic.

## 3.9.2 - 2025-01-22

1. Fix importing of LangChain callback handler under certain circumstances.
Expand Down
2 changes: 2 additions & 0 deletions posthog/ai/langchain/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,8 @@ def on_llm_error(
"$ai_latency": latency,
"$ai_trace_id": trace_id,
"$ai_base_url": run.get("base_url"),
"$ai_is_error": True,
"$ai_error": error.__str__(),
**self._properties,
}
if self._distinct_id is None:
Expand Down
12 changes: 12 additions & 0 deletions posthog/ai/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,12 +116,17 @@ def call_llm_and_track_usage(
error = None
http_status = 200
usage: Dict[str, Any] = {}
error_params: Dict[str, Any] = {}

try:
response = call_method(**kwargs)
except Exception as exc:
error = exc
http_status = getattr(exc, "status_code", 0)  # default to 0 because it's likely an SDK error
error_params = {
"$ai_is_error": True,
"$ai_error": exc.__str__(),
}
finally:
end_time = time.time()
latency = end_time - start_time
Expand Down Expand Up @@ -149,6 +154,7 @@ def call_llm_and_track_usage(
"$ai_trace_id": posthog_trace_id,
"$ai_base_url": str(base_url),
**(posthog_properties or {}),
**(error_params or {}),
}

if posthog_distinct_id is None:
Expand Down Expand Up @@ -186,12 +192,17 @@ async def call_llm_and_track_usage_async(
error = None
http_status = 200
usage: Dict[str, Any] = {}
error_params: Dict[str, Any] = {}

try:
response = await call_async_method(**kwargs)
except Exception as exc:
error = exc
http_status = getattr(exc, "status_code", 0) # default to 0 because its likely an SDK error
error_params = {
"$ai_is_error": True,
"$ai_error": exc.__str__(),
}
finally:
end_time = time.time()
latency = end_time - start_time
Expand Down Expand Up @@ -219,6 +230,7 @@ async def call_llm_and_track_usage_async(
"$ai_trace_id": posthog_trace_id,
"$ai_base_url": str(base_url),
**(posthog_properties or {}),
**(error_params or {}),
}

if posthog_distinct_id is None:
Expand Down
14 changes: 14 additions & 0 deletions posthog/test/ai/anthropic/test_anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,3 +325,17 @@ async def test_async_streaming_system_prompt(mock_client, mock_anthropic_stream)
{"role": "system", "content": "You must always answer with 'Bar'."},
{"role": "user", "content": "Foo"},
]


def test_error(mock_client, mock_anthropic_response):
    """Exceptions raised by the Anthropic SDK propagate to the caller and are
    still captured as a PostHog event carrying $ai_is_error / $ai_error."""
    with patch("anthropic.resources.Messages.create", side_effect=Exception("Test error")):
        client = Anthropic(api_key="test-key", posthog_client=mock_client)
        with pytest.raises(Exception):
            client.messages.create(
                model="claude-3-opus-20240229",
                messages=[{"role": "user", "content": "Hello"}],
            )

    # Exactly one capture call should have been made despite the failure.
    assert mock_client.capture.call_count == 1

    captured_props = mock_client.capture.call_args[1]["properties"]
    assert captured_props["$ai_is_error"] is True
    assert captured_props["$ai_error"] == "Test error"
14 changes: 14 additions & 0 deletions posthog/test/ai/openai/test_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,3 +173,17 @@ def test_privacy_mode_global(mock_client, mock_openai_response):
props = call_args["properties"]
assert props["$ai_input"] is None
assert props["$ai_output_choices"] is None


def test_error(mock_client, mock_openai_response):
with patch("openai.resources.chat.completions.Completions.create", side_effect=Exception("Test error")):
client = OpenAI(api_key="test-key", posthog_client=mock_client)
with pytest.raises(Exception):
client.chat.completions.create(model="gpt-4", messages=[{"role": "user", "content": "Hello"}])
Comment on lines +180 to +182
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If it's an HTTP error, say a 400, do we still capture the $ai_error content then?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.


assert mock_client.capture.call_count == 1

call_args = mock_client.capture.call_args[1]
props = call_args["properties"]
assert props["$ai_is_error"] is True
assert props["$ai_error"] == "Test error"
2 changes: 1 addition & 1 deletion posthog/version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
VERSION = "3.9.3"
VERSION = "3.10.0"

if __name__ == "__main__":
print(VERSION, end="") # noqa: T201
Loading