updating test cases
Signed-off-by: hansrajr <[email protected]>
Hansrajr committed Nov 27, 2024
1 parent bb7e64d commit 62118eb
Showing 3 changed files with 18 additions and 5 deletions.
6 changes: 3 additions & 3 deletions src/monocle_apptrace/wrap_common.py
@@ -238,7 +238,7 @@ async def allm_wrapper(tracer, to_wrap, wrapped, instance, args, kwargs):
return_value = await wrapped(*args, **kwargs)
kwargs.update({"provider_name": provider_name, "inference_endpoint": inference_endpoint or getattr(instance, 'endpoint', None)})
process_span(to_wrap, span, instance, args, kwargs, return_value)
update_span_from_llm_response(response=return_value, span=span, instance=instance, args=args)
update_span_from_llm_response(response=return_value, span=span, instance=instance)

return return_value

@@ -264,7 +264,7 @@ def llm_wrapper(tracer: Tracer, to_wrap, wrapped, instance, args, kwargs):
return_value = wrapped(*args, **kwargs)
kwargs.update({"provider_name": provider_name, "inference_endpoint": inference_endpoint or getattr(instance, 'endpoint', None)})
process_span(to_wrap, span, instance, args, kwargs, return_value)
update_span_from_llm_response(response=return_value, span=span, instance=instance, args=args)
update_span_from_llm_response(response=return_value, span=span, instance=instance)

return return_value

@@ -363,7 +363,7 @@ def get_input_from_args(chain_args):
return ""


def update_span_from_llm_response(response, span: Span, instance, args):
def update_span_from_llm_response(response, span: Span, instance):
if (response is not None and isinstance(response, dict) and "meta" in response) or (
response is not None and hasattr(response, "response_metadata")):
token_usage = None
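The net change in wrap_common.py is that update_span_from_llm_response no longer takes an args parameter, and both allm_wrapper and llm_wrapper drop it at the call site. A minimal sketch of the new call shape (the wrapper body below is illustrative, not the actual monocle_apptrace code):

def example_wrapper(wrapped, instance, span, *args, **kwargs):
    # The helper now reads everything it needs from the response and the instance;
    # positional args from the wrapped call are no longer forwarded.
    return_value = wrapped(*args, **kwargs)
    update_span_from_llm_response(response=return_value, span=span, instance=instance)
    return return_value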
3 changes: 1 addition & 2 deletions tests/langchain_test.py
@@ -225,8 +225,7 @@ def test_llm_response(self):
}
)
instance = MagicMock()
args = MagicMock()
update_span_from_llm_response(span=span, response=message, instance=instance, args=args)
update_span_from_llm_response(span=span, response=message, instance=instance)
event_found = False
for event in span.events:
if event.name == "metadata":
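With args removed, the unit test only has to mock the instance before calling the helper. A condensed sketch of the updated test flow, assuming span and message are built earlier in the test as in the unchanged lines above (the metadata check below is illustrative):

from unittest.mock import MagicMock

instance = MagicMock()
update_span_from_llm_response(span=span, response=message, instance=instance)
# The test then scans span.events for a "metadata" event, roughly:
assert any(event.name == "metadata" for event in span.events)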
14 changes: 14 additions & 0 deletions tests/langchain_workflow_name_test.py
@@ -9,6 +9,8 @@

import pytest
import requests
from markdown_it.rules_inline.backticks import regex

from dummy_class import DummyClass
from embeddings_wrapper import HuggingFaceEmbeddings
from http_span_exporter import HttpSpanExporter
@@ -166,6 +168,18 @@ def test_llm_chain(self, test_name, test_input_infra, test_output_infra, mock_po

assert root_span["attributes"]["entity.1.name"] == "test"
assert root_span["attributes"]["entity.1.type"] == "workflow.langchain"
input_found = False
output_found = False

for event in root_span['events']:
if event['name'] == "data.input" and event['attributes']['input'] == query:
input_found = True
elif event['name'] == "data.output" and event['attributes']['response'] == self.ragText:
output_found = True

assert input_found
assert output_found


finally:
os.environ.pop(test_input_infra)
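The new assertions walk the root span's events and require one data.input event whose input matches the query and one data.output event whose response matches the expected RAG text. The same pattern as a standalone helper (the helper name and dict-style event access are assumptions for illustration, not part of this commit):

def assert_input_output_events(events, query, expected_response):
    # Succeeds only if both a matching input event and a matching output event exist.
    input_found = any(
        e["name"] == "data.input" and e["attributes"].get("input") == query
        for e in events
    )
    output_found = any(
        e["name"] == "data.output" and e["attributes"].get("response") == expected_response
        for e in events
    )
    assert input_found and output_found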
