Commit

Merge branch 'main' into feat/checkCompatibility
ArzelaAscoIi authored Jul 31, 2024
2 parents 50c6d1e + f0b619e commit 5d1ce71
Showing 2 changed files with 6 additions and 8 deletions.
6 changes: 3 additions & 3 deletions integrations/langfuse/README.md
@@ -36,7 +36,7 @@ os.environ["LANGFUSE_HOST"] = "https://cloud.langfuse.com"
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
 os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"
 
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack.components.generators.chat import OpenAIChatGenerator
 from haystack.dataclasses import ChatMessage
 from haystack import Pipeline
@@ -46,7 +46,7 @@ from haystack_integrations.components.connectors.langfuse import LangfuseConnect
 if __name__ == "__main__":
     pipe = Pipeline()
     pipe.add_component("tracer", LangfuseConnector("Chat example"))
-    pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
+    pipe.add_component("prompt_builder", ChatPromptBuilder())
     pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
 
     pipe.connect("prompt_builder.prompt", "llm.messages")
@@ -57,7 +57,7 @@ if __name__ == "__main__":
     ]
 
     response = pipe.run(
-        data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
+        data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}}
     )
     print(response["llm"]["replies"][0])
     print(response["tracer"]["trace_url"])
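
For reference, here is a consolidated sketch of the updated README example with the renamed builder and input applied. It is assembled from the hunks above; the import of os, the credential setup, and the full messages list are not visible in this diff, so those parts are assumptions (the user message is borrowed from the test file's context).

import os  # assumed: not shown in the diff hunks

# Assumes OPENAI_API_KEY, LANGFUSE_SECRET_KEY and LANGFUSE_PUBLIC_KEY are already set in the environment.
os.environ["LANGFUSE_HOST"] = "https://cloud.langfuse.com"
os.environ["TOKENIZERS_PARALLELISM"] = "false"
os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"

from haystack.components.builders import ChatPromptBuilder  # replaces DynamicChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack import Pipeline

from haystack_integrations.components.connectors.langfuse import LangfuseConnector

if __name__ == "__main__":
    pipe = Pipeline()
    pipe.add_component("tracer", LangfuseConnector("Chat example"))
    pipe.add_component("prompt_builder", ChatPromptBuilder())
    pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))

    pipe.connect("prompt_builder.prompt", "llm.messages")

    # The messages list is elided in the README hunk; this user message is taken
    # from the test file's context and acts as the prompt template.
    messages = [
        ChatMessage.from_user("Tell me about {{location}}"),
    ]

    # ChatPromptBuilder receives the template through the "template" input,
    # which replaces DynamicChatPromptBuilder's "prompt_source".
    response = pipe.run(
        data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}}
    )
    print(response["llm"]["replies"][0])
    print(response["tracer"]["trace_url"])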
8 changes: 3 additions & 5 deletions integrations/langfuse/tests/test_tracing.py
@@ -9,7 +9,7 @@
 import requests
 
 from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack.components.generators.chat import OpenAIChatGenerator
 from haystack.dataclasses import ChatMessage
 from requests.auth import HTTPBasicAuth
@@ -26,7 +26,7 @@ def test_tracing_integration():
 
     pipe = Pipeline()
     pipe.add_component("tracer", LangfuseConnector(name="Chat example", public=True))  # public so anyone can verify run
-    pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
+    pipe.add_component("prompt_builder", ChatPromptBuilder())
     pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
 
     pipe.connect("prompt_builder.prompt", "llm.messages")
@@ -36,9 +36,7 @@
         ChatMessage.from_user("Tell me about {{location}}"),
     ]
 
-    response = pipe.run(
-        data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
-    )
+    response = pipe.run(data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}})
     assert "Berlin" in response["llm"]["replies"][0].content
     assert response["tracer"]["trace_url"]
     url = "https://cloud.langfuse.com/api/public/traces/"
