Replaces occurrences of dynamic prompt builders (#250)
Co-authored-by: Amna Mubashar <[email protected]>
Amnah199 and Amna Mubashar authored Aug 2, 2024
1 parent e3a4916 commit 451f266
Showing 5 changed files with 14 additions and 13 deletions.
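In short: `DynamicChatPromptBuilder` becomes `ChatPromptBuilder`, the `runtime_variables` constructor argument disappears, and the run-time input key `prompt_source` becomes `template`. A minimal sketch of the new usage, assuming Haystack 2.x (the `{{topic}}` placeholder and example text are illustrative, not from the diff):

```python
from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage

# ChatPromptBuilder takes a list of ChatMessage objects as its template and
# reads the variables from the Jinja2 placeholders, so no runtime_variables
# argument is declared up front.
builder = ChatPromptBuilder()
messages = [ChatMessage.from_user("Summarize {{topic}} in one sentence.")]

# The template is passed under "template" (previously "prompt_source").
result = builder.run(template=messages, template_variables={"topic": "prompt engineering"})
print(result["prompt"])  # the rendered list of ChatMessage objects
```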
8 changes: 4 additions & 4 deletions integrations/anthropic.md
@@ -58,7 +58,7 @@ Below is an example RAG Pipeline where we answer a predefined question using the

```python
from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
from haystack.components.converters import HTMLToDocument
from haystack.components.fetchers import LinkContentFetcher
from haystack.components.generators.utils import print_streaming_chunk
@@ -75,7 +75,7 @@ messages = [
rag_pipeline = Pipeline()
rag_pipeline.add_component("fetcher", LinkContentFetcher())
rag_pipeline.add_component("converter", HTMLToDocument())
rag_pipeline.add_component("prompt_builder", DynamicChatPromptBuilder(runtime_variables=["documents"]))
rag_pipeline.add_component("prompt_builder", ChatPromptBuilder())
rag_pipeline.add_component(
"llm",
AnthropicChatGenerator(
@@ -88,13 +88,13 @@ rag_pipeline.add_component(

rag_pipeline.connect("fetcher", "converter")
rag_pipeline.connect("converter", "prompt_builder")
rag_pipeline.connect("prompt_builder", "llm")
rag_pipeline.connect("prompt_builder.prompt", "llm.messages")

question = "What are the best practices in prompt engineering?"
rag_pipeline.run(
data={
"fetcher": {"urls": ["https://docs.anthropic.com/claude/docs/prompt-engineering"]},
"prompt_builder": {"template_variables": {"query": question}, "prompt_source": messages},
"prompt_builder": {"template_variables": {"query": question}, "template": messages},
}
)
```
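One detail worth noting in the diff above: the builder-to-generator connection is now written with explicit socket names. `ChatPromptBuilder` emits its rendered messages on a `prompt` output socket and chat generators accept them on a `messages` input, so `prompt_builder.prompt -> llm.messages` leaves nothing to automatic socket matching. A stripped-down sketch of the same wiring (the generator choice is illustrative):

```python
from haystack import Pipeline
from haystack.components.builders import ChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage

pipe = Pipeline()
pipe.add_component("prompt_builder", ChatPromptBuilder())
pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))

# Explicit sockets: the builder's "prompt" output (a list of ChatMessage)
# feeds the generator's "messages" input.
pipe.connect("prompt_builder.prompt", "llm.messages")

messages = [ChatMessage.from_user("Tell me about {{location}}")]
pipe.run(data={"prompt_builder": {"template": messages,
                                  "template_variables": {"location": "Berlin"}}})
```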
4 changes: 2 additions & 2 deletions integrations/cohere.md
@@ -117,13 +117,13 @@ Similar to the above example, you can also use [`CohereChatGenerator`](https://d

```python
from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.generators.cohere.chat import CohereChatGenerator


pipe = Pipeline()
pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
pipe.add_component("prompt_builder", ChatPromptBuilder())
pipe.add_component("llm", CohereChatGenerator())
pipe.connect("prompt_builder", "llm")

4 changes: 2 additions & 2 deletions integrations/context-ai.md
@@ -59,7 +59,7 @@ import uuid
import os

from haystack.components.generators.chat import OpenAIChatGenerator
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
from haystack import Pipeline
from haystack.dataclasses import ChatMessage

@@ -70,7 +70,7 @@ model = "gpt-3.5-turbo"
os.environ["GETCONTEXT_TOKEN"] = "GETCONTEXT_TOKEN"
os.environ["OPENAI_API_KEY"] = "OPENAI_API_KEY"

-prompt_builder = DynamicChatPromptBuilder()
+prompt_builder = ChatPromptBuilder()
llm = OpenAIChatGenerator(model=model)
prompt_analytics = ContextAIAnalytics()
assistant_analytics = ContextAIAnalytics()
6 changes: 3 additions & 3 deletions integrations/langfuse.md
@@ -132,14 +132,14 @@ Once you've run these code samples, you can also [use the Langfuse dashboard to

```python
from haystack import Pipeline
-from haystack.components.builders import DynamicChatPromptBuilder
+from haystack.components.builders import ChatPromptBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.connectors.langfuse import LangfuseConnector

pipe = Pipeline()
pipe.add_component("tracer", LangfuseConnector("Chat example"))
pipe.add_component("prompt_builder", DynamicChatPromptBuilder())
pipe.add_component("prompt_builder", ChatPromptBuilder())
pipe.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))

pipe.connect("prompt_builder.prompt", "llm.messages")
@@ -149,7 +149,7 @@ messages = [
]

response = pipe.run(
data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "prompt_source": messages}}
data={"prompt_builder": {"template_variables": {"location": "Berlin"}, "template": messages}}
)
print(response["llm"]["replies"][0])
print(response["tracer"]["trace_url"])
5 changes: 3 additions & 2 deletions integrations/mistral.md
@@ -125,6 +125,7 @@ from haystack.document_stores.in_memory import InMemoryDocumentStore
from haystack_integrations.components.embedders.mistral.document_embedder import MistralDocumentEmbedder
from haystack_integrations.components.embedders.mistral.text_embedder import MistralTextEmbedder
from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever
+from haystack.components.builders import ChatPromptBuilder

os.environ["MISTRAL_API_KEY"] = "YOUR_MISTRAL_API_KEY"

@@ -140,7 +141,7 @@ document_store.write_documents(documents)

text_embedder = MistralTextEmbedder()
retriever = InMemoryEmbeddingRetriever(document_store=document_store)
-prompt_builder = DynamicChatPromptBuilder(runtime_variables=["documents"])
+prompt_builder = ChatPromptBuilder()
llm = MistralChatGenerator(streaming_callback=print_streaming_chunk)

messages = [ChatMessage.from_user("Here are the documents: {{documents}} \\n Answer: {{query}}")]
@@ -161,7 +162,7 @@ question = "Who lives in Berlin?"
result = rag_pipeline.run(
{
"text_embedder": {"text": question},
"prompt_builder": {"template_variables": {"query": question}, "prompt_source": messages},
"prompt_builder": {"template_variables": {"query": question}, "template": messages},
"llm": {"generation_kwargs": {"max_tokens": 165}},
}
)
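Note that the removed `runtime_variables=["documents"]` argument has no direct replacement: `ChatPromptBuilder` derives its variables from the Jinja2 placeholders in the template itself, so `{{documents}}` and `{{query}}` above are picked up automatically. A standalone sketch of that behavior (the sample document is made up):

```python
from haystack import Document
from haystack.components.builders import ChatPromptBuilder
from haystack.dataclasses import ChatMessage

builder = ChatPromptBuilder()
messages = [ChatMessage.from_user("Here are the documents: {{documents}} \n Answer: {{query}}")]

# Both placeholders are filled at run time; nothing is declared at
# construction time.
result = builder.run(
    template=messages,
    template_variables={
        "query": "Who lives in Berlin?",
        "documents": [Document(content="Jean lives in Paris.")],
    },
)
```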
