From 2443ef17d3ed914af3d9116aad83a0d1ee09e5ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bilge=20Y=C3=BCcel?= Date: Tue, 6 Feb 2024 16:30:04 +0300 Subject: [PATCH] Fix secret management breaking change (#296) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix secret management breaking change * Update content/blog/customizing-rag-to-summarize-hacker-news-posts-with-haystack2/index.md Co-authored-by: Tuana Çelik * Update index.md --------- Co-authored-by: Tuana Çelik --- content/blog/astradb-haystack-integration/index.md | 5 +++-- .../index.md | 7 ++++--- content/blog/mixtral-8x7b-healthcare-chatbot/index.md | 5 +++-- content/blog/using-jina-embeddings-haystack/index.md | 9 +++++---- content/overview/quick-start.md | 3 ++- 5 files changed, 17 insertions(+), 12 deletions(-) diff --git a/content/blog/astradb-haystack-integration/index.md b/content/blog/astradb-haystack-integration/index.md index aa3ae8a9..679b4370 100644 --- a/content/blog/astradb-haystack-integration/index.md +++ b/content/blog/astradb-haystack-integration/index.md @@ -48,8 +48,9 @@ Remember earlier when I mentioned you were going to need your credentials? 
I hop ```python from getpass import getpass +import os -OPENAI_API_KEY = getpass("Enter your openAI key:") +os.environ["OPENAI_API_KEY"] = getpass("Enter your openAI key:") ASTRA_DB_ID = getpass("Enter your Astra database ID:") ASTRA_DB_APPLICATION_TOKEN = getpass("Enter your Astra application token (e.g.AstraCS:xxx ):") ASTRA_DB_REGION = getpass("Enter your AstraDB Region: ") @@ -140,7 +141,7 @@ rag_pipeline.add_component( ) rag_pipeline.add_component(instance=AstraRetriever(document_store=document_store), name="retriever") rag_pipeline.add_component(instance=PromptBuilder(template=prompt_template), name="prompt_builder") -rag_pipeline.add_component(instance=OpenAIGenerator(api_key=OPENAI_API_KEY), name="llm") +rag_pipeline.add_component(instance=OpenAIGenerator(), name="llm") rag_pipeline.add_component(instance=AnswerBuilder(), name="answer_builder") rag_pipeline.connect("embedder", "retriever") rag_pipeline.connect("retriever", "prompt_builder.documents") diff --git a/content/blog/customizing-rag-to-summarize-hacker-news-posts-with-haystack2/index.md b/content/blog/customizing-rag-to-summarize-hacker-news-posts-with-haystack2/index.md index 1d5207c5..d173b31f 100644 --- a/content/blog/customizing-rag-to-summarize-hacker-news-posts-with-haystack2/index.md +++ b/content/blog/customizing-rag-to-summarize-hacker-news-posts-with-haystack2/index.md @@ -117,8 +117,9 @@ First, we initialize all of the components we will need for the pipeline: ```python from haystack import Pipeline from haystack.components.builders.prompt_builder import PromptBuilder -from haystack.components.generators import OpenAIGenerator - +from haystack.components.generators import OpenAIGenerator +from haystack.utils import Secret + prompt_template = """ You will be provided a few of the latest posts in HackerNews, followed by their URL. For each post, provide a brief summary followed by the URL the full post can be found at. 
@@ -131,7 +132,7 @@ Posts: """ prompt_builder = PromptBuilder(template=prompt_template) -llm = OpenAIGenerator(mode="gpt-4", api_key='YOUR_API_KEY') +llm = OpenAIGenerator(model="gpt-4", api_key=Secret.from_token('YOUR_API_KEY')) fetcher = HackernewsNewestFetcher() ``` Next, we add the components to a Pipeline: diff --git a/content/blog/mixtral-8x7b-healthcare-chatbot/index.md b/content/blog/mixtral-8x7b-healthcare-chatbot/index.md index 6e7cf420..6cb41411 100644 --- a/content/blog/mixtral-8x7b-healthcare-chatbot/index.md +++ b/content/blog/mixtral-8x7b-healthcare-chatbot/index.md @@ -89,11 +89,12 @@ So now our flow is as follows: First, initialize the LLMs and warm them up. ```python from haystack.components.generators import HuggingFaceTGIGenerator +from haystack.utils import Secret -keyword_llm = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1", token=huggingface_token) +keyword_llm = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1", token=Secret.from_token(huggingface_token)) keyword_llm.warm_up() -llm = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1", token=huggingface_token) +llm = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1", token=Secret.from_token(huggingface_token)) llm.warm_up() ``` diff --git a/content/blog/using-jina-embeddings-haystack/index.md b/content/blog/using-jina-embeddings-haystack/index.md index 12f67c87..1bd873a1 100644 --- a/content/blog/using-jina-embeddings-haystack/index.md +++ b/content/blog/using-jina-embeddings-haystack/index.md @@ -55,10 +55,11 @@ pip install jina-haystack chroma-haystack pypdf Then let's input our credentials. Or you can set them as environment variables instead if you're feeling fancy. 
```python -import getpass +from getpass import getpass +import os -jina_api_key = getpass.getpass("JINA api key:") -hf_token = getpass.getpass("Enter your HuggingFace api token:") +jina_api_key = getpass("JINA api key:") +os.environ["HF_API_TOKEN"] = getpass("Enter your HuggingFace api token: ") ``` ## Building the indexing pipeline @@ -147,7 +148,7 @@ question: {{question}} """ text_embedder = JinaTextEmbedder(api_key=jina_api_key, model="jina-embeddings-v2-base-en") -generator = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1", token=hf_token) +generator = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1") generator.warm_up() prompt_builder = PromptBuilder(template=prompt) diff --git a/content/overview/quick-start.md b/content/overview/quick-start.md index 5efe6d00..03aa2048 100644 --- a/content/overview/quick-start.md +++ b/content/overview/quick-start.md @@ -49,6 +49,7 @@ Then, index your data to the DocumentStore, build a RAG pipeline, and ask a ques import os from haystack import Pipeline, Document +from haystack.utils import Secret from haystack.document_stores.in_memory import InMemoryDocumentStore from haystack.components.retrievers.in_memory import InMemoryBM25Retriever from haystack.components.generators import OpenAIGenerator @@ -75,7 +76,7 @@ Answer: retriever = InMemoryBM25Retriever(document_store=document_store) prompt_builder = PromptBuilder(template=prompt_template) -llm = OpenAIGenerator(api_key=api_key) +llm = OpenAIGenerator(api_key=Secret.from_token(api_key)) rag_pipeline = Pipeline() rag_pipeline.add_component("retriever", retriever)