From b8e2623014be9ab7410d98832e25eda46923b04c Mon Sep 17 00:00:00 2001
From: Vladimir Blagojevic
Date: Tue, 30 Jul 2024 09:04:39 +0100
Subject: [PATCH] chore: `Langfuse` - replace DynamicChatPromptBuilder with
 ChatPromptBuilder (#925)

---
 integrations/langfuse/example/chat.py         |  8 +++----
 .../connectors/langfuse/langfuse_connector.py | 21 +++++++++++++------
 2 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/integrations/langfuse/example/chat.py b/integrations/langfuse/example/chat.py
index 99ed7a238..443d65a13 100644
--- a/integrations/langfuse/example/chat.py
+++ b/integrations/langfuse/example/chat.py
@@ -3,7 +3,7 @@
 os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"
 
 from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
 from haystack.components.generators.chat import OpenAIChatGenerator
 from haystack.dataclasses import ChatMessage
 from haystack_integrations.components.connectors.langfuse import LangfuseConnector
@@ -12,7 +12,7 @@
 
     pipe = Pipeline()
     pipe.add_component("tracer", LangfuseConnector("Chat example"))
-    pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
+    pipe.add_component("prompt_builder", ChatPromptBuilder())
     pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
     pipe.connect("prompt_builder.prompt", "llm.messages")
 
@@ -22,8 +22,6 @@
         ChatMessage.from_user("Tell me about {{location}}"),
     ]
 
-    response = pipe.run(
-        data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
-    )
+    response = pipe.run(data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}})
     print(response["llm"]["replies"][0])
     print(response["tracer"]["trace_url"])
diff --git a/integrations/langfuse/src/haystack_integrations/components/connectors/langfuse/langfuse_connector.py b/integrations/langfuse/src/haystack_integrations/components/connectors/langfuse/langfuse_connector.py
index cfe150317..51703823e 100644
--- a/integrations/langfuse/src/haystack_integrations/components/connectors/langfuse/langfuse_connector.py
+++ b/integrations/langfuse/src/haystack_integrations/components/connectors/langfuse/langfuse_connector.py
@@ -40,6 +40,7 @@ class LangfuseConnector:
 
     # ...
 
+    @app.on_event("shutdown")
     async def shutdown_event():
         tracer.actual_tracer.flush()
 
@@ -53,27 +54,35 @@ async def shutdown_event():
 
     os.environ["HAYSTACK_CONTENT_TRACING_ENABLED"] = "true"
 
     from haystack import Pipeline
-    from haystack.components.builders import DynamicChatPromptBuilder
+    from haystack.components.builders import ChatPromptBuilder
     from haystack.components.generators.chat import OpenAIChatGenerator
     from haystack.dataclasses import ChatMessage
-    from haystack_integrations.components.connectors.langfuse import LangfuseConnector
+    from haystack_integrations.components.connectors.langfuse import (
+        LangfuseConnector,
+    )
 
     if __name__ == "__main__":
-
         pipe = Pipeline()
         pipe.add_component("tracer", LangfuseConnector("Chat example"))
-        pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
+        pipe.add_component("prompt_builder", ChatPromptBuilder())
         pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
         pipe.connect("prompt_builder.prompt", "llm.messages")
 
         messages = [
-            ChatMessage.from_system("Always respond in German even if some input data is in other languages."),
+            ChatMessage.from_system(
+                "Always respond in German even if some input data is in other languages."
+            ),
             ChatMessage.from_user("Tell me about {{location}}"),
         ]
 
         response = pipe.run(
-            data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
+            data={
+                "prompt_builder": {
+                    "template_variables": {"location": "Berlin"},
+                    "template": messages,
+                }
+            }
         )
         print(response["llm"]["replies"][0])
         print(response["tracer"]["trace_url"])
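
Not part of the patch: the only change callers see is the run-input name. DynamicChatPromptBuilder took the message list as `prompt_source`; ChatPromptBuilder takes it as `template`, while `template_variables` keeps its name, and the `prompt` output still connects to the generator's `messages` input. A minimal standalone sketch of the migrated API, assuming haystack-ai 2.x and an OPENAI_API_KEY in the environment:

    from haystack import Pipeline
    from haystack.components.builders import ChatPromptBuilder
    from haystack.components.generators.chat import OpenAIChatGenerator
    from haystack.dataclasses import ChatMessage

    pipe = Pipeline()
    pipe.add_component("prompt_builder", ChatPromptBuilder())
    pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
    # ChatPromptBuilder emits the rendered messages on its "prompt" output
    pipe.connect("prompt_builder.prompt", "llm.messages")

    # The message list is now passed as "template" (was "prompt_source")
    response = pipe.run(
        data={
            "prompt_builder": {
                "template": [ChatMessage.from_user("Tell me about {{location}}")],
                "template_variables": {"location": "Berlin"},
            }
        }
    )
    print(response["llm"]["replies"][0])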