-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
TextLLM: Add token usage to response meta (#338)
- Loading branch information
1 parent
d9ff3bd
commit 5c2eae4
Showing
2 changed files
with
36 additions
and
0 deletions.
There are no files selected for viewing
33 changes: 33 additions & 0 deletions
33
modules/text/module_text_llm/module_text_llm/helpers/models/callbacks.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
from langchain.callbacks.base import BaseCallbackHandler | ||
from langchain_core.outputs import LLMResult | ||
from langchain_core.messages.ai import UsageMetadata | ||
|
||
from athena import emit_meta, get_meta | ||
|
||
|
||
class UsageHandler(BaseCallbackHandler):
    """LangChain callback that accumulates LLM token usage into the Athena response meta.

    After every LLM call it updates two meta entries:
      - "total_usage": running totals of input/output/total tokens across all calls
      - "llm_calls": one entry per generation with model name and its token counts
    """

    def on_llm_end(self, response: LLMResult, **kwargs) -> None:
        """Record the token usage of the finished LLM call in the response meta.

        Args:
            response: The completed LLM result; its generations are inspected
                for `usage_metadata` and `response_metadata`.
            **kwargs: Additional callback arguments (ignored).
        """
        meta = get_meta()

        # Start from previously emitted values so usage accumulates across calls.
        total_usage = meta.get("total_usage", {"input_tokens": 0, "output_tokens": 0, "total_tokens": 0})
        llm_calls = meta.get("llm_calls", [])

        for generations in response.generations:
            for generation in generations:
                message = generation.dict()["message"]

                # Not every provider (or streaming path) reports usage; the key
                # may be absent or None. Skip such generations instead of
                # raising KeyError/TypeError and breaking the callback chain.
                generation_usage: UsageMetadata = message.get("usage_metadata") or {}
                if not generation_usage:
                    continue
                # response_metadata may likewise be missing; fall back to None.
                model_name = message.get("response_metadata", {}).get("model_name", None)

                total_usage["input_tokens"] += generation_usage["input_tokens"]
                total_usage["output_tokens"] += generation_usage["output_tokens"]
                total_usage["total_tokens"] += generation_usage["total_tokens"]

                llm_calls.append({
                    "model_name": model_name,
                    "input_tokens": generation_usage["input_tokens"],
                    "output_tokens": generation_usage["output_tokens"],
                    "total_tokens": generation_usage["total_tokens"],
                })

        emit_meta("total_usage", total_usage)
        emit_meta("llm_calls", llm_calls)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters