Skip to content

Commit

Permalink
chore: Langfuse - replace DynamicChatPromptBuilder with ChatPromptBuilder (#925)
Browse files Browse the repository at this point in the history
  • Loading branch information
vblagoje authored Jul 30, 2024
1 parent 2f6f134 commit b8e2623
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 11 deletions.
8 changes: 3 additions & 5 deletions integrations/langfuse/example/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"

from haystack import Pipeline
from haystack.components.builders import DynamicChatPromptBuilder
from haystack.components.builders import ChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.connectors.langfuse import LangfuseConnector
Expand All @@ -12,7 +12,7 @@

pipe = Pipeline()
pipe.add_component("tracer", LangfuseConnector("Chat example"))
pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
pipe.add_component("prompt_builder", ChatPromptBuilder())
pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))

pipe.connect("prompt_builder.prompt", "llm.messages")
Expand All @@ -22,8 +22,6 @@
ChatMessage.from_user("Tell me about {{location}}"),
]

response = pipe.run(
data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
)
response = pipe.run(data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}})
print(response["llm"]["replies"][0])
print(response["tracer"]["trace_url"])
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ class LangfuseConnector:
# ...
@app.on_event("shutdown")
async def shutdown_event():
tracer.actual_tracer.flush()
Expand All @@ -53,27 +54,35 @@ async def shutdown_event():
os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"
from haystack import Pipeline
from haystack.components.builders import DynamicChatPromptBuilder
from haystack.components.builders import ChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.connectors.langfuse import LangfuseConnector
from haystack_integrations.components.connectors.langfuse import (
LangfuseConnector,
)
if __name__ == "__main__":
pipe = Pipeline()
pipe.add_component("tracer", LangfuseConnector("Chat example"))
pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
pipe.add_component("prompt_builder", ChatPromptBuilder())
pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
pipe.connect("prompt_builder.prompt", "llm.messages")
messages = [
ChatMessage.from_system("Always respond in German even if some input data is in other languages."),
ChatMessage.from_system(
"Always respond in German even if some input data is in other languages."
),
ChatMessage.from_user("Tell me about {{location}}"),
]
response = pipe.run(
data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
data={
"prompt_builder": {
"template_variables": {"location": "Berlin"},
"template": messages,
}
}
)
print(response["llm"]["replies"][0])
print(response["tracer"]["trace_url"])
Expand Down

0 comments on commit b8e2623

Please sign in to comment.