diff --git a/examples/src/agents/quickstart.ts b/examples/src/agents/quickstart.ts
index 7407c2f3dd66..1e588b5a2a1c 100644
--- a/examples/src/agents/quickstart.ts
+++ b/examples/src/agents/quickstart.ts
@@ -37,9 +37,7 @@ const vectorstore = await MemoryVectorStore.fromDocuments(
 );
 const retriever = vectorstore.asRetriever();
 
-const retrieverResult = await retriever.invoke(
-  "how to upload a dataset"
-);
+const retrieverResult = await retriever.invoke("how to upload a dataset");
 console.log(retrieverResult[0]);
 
 /*
diff --git a/examples/src/callbacks/trace_groups.ts b/examples/src/callbacks/trace_groups.ts
index b0e072ac313e..5d014f66b3b5 100644
--- a/examples/src/callbacks/trace_groups.ts
+++ b/examples/src/callbacks/trace_groups.ts
@@ -22,7 +22,10 @@ export const run = async () => {
     async (manager: CallbackManager, questions: string[]) => {
       await chain.invoke({ question: questions[0] }, manager);
       await chain.invoke({ question: questions[1] }, manager);
-      const finalResult = await chain.invoke({ question: questions[2] }, manager);
+      const finalResult = await chain.invoke(
+        { question: questions[2] },
+        manager
+      );
       return finalResult;
     },
     [
diff --git a/examples/src/chains/llm_chain_cancellation.ts b/examples/src/chains/llm_chain_cancellation.ts
index d677e0fca0e0..a41b56a2b2e2 100644
--- a/examples/src/chains/llm_chain_cancellation.ts
+++ b/examples/src/chains/llm_chain_cancellation.ts
@@ -20,11 +20,13 @@ try {
   const res = await chain.invoke(
     { product: "colorful socks", signal: controller.signal },
     {
-      callbacks: [{
-        handleLLMNewToken(token: string) {
-          process.stdout.write(token);
-        }
-      }],
+      callbacks: [
+        {
+          handleLLMNewToken(token: string) {
+            process.stdout.write(token);
+          },
+        },
+      ],
     }
   );
 } catch (e) {
diff --git a/examples/src/chains/llm_chain_stream.ts b/examples/src/chains/llm_chain_stream.ts
index 8f7e80de94a2..3b24982c7c39 100644
--- a/examples/src/chains/llm_chain_stream.ts
+++ b/examples/src/chains/llm_chain_stream.ts
@@ -10,14 +10,17 @@ const prompt = PromptTemplate.fromTemplate(
 const chain = new LLMChain({ llm: model, prompt });
 
 // Call the chain with the inputs and a callback for the streamed tokens
-const res = await chain.invoke({ product: "colorful socks" }, {
-  callbacks: [
-    {
-      handleLLMNewToken(token: string) {
-        process.stdout.write(token);
+const res = await chain.invoke(
+  { product: "colorful socks" },
+  {
+    callbacks: [
+      {
+        handleLLMNewToken(token: string) {
+          process.stdout.write(token);
+        },
       },
-    },
-  ]
-});
+    ],
+  }
+);
 console.log({ res });
 // { res: { text: '\n\nKaleidoscope Socks' } }
diff --git a/examples/src/chat/overview.ts b/examples/src/chat/overview.ts
index e8ecd6581393..c46255a239c1 100644
--- a/examples/src/chat/overview.ts
+++ b/examples/src/chat/overview.ts
@@ -38,19 +38,18 @@ export const run = async () => {
 
   // Sending two separate prompts in parallel, receiving two responses back
   const responseA = await chat.invoke([
-
-      new SystemMessage(
-        "You are a helpful assistant that translates English to French."
-      ),
-      new HumanMessage(
-        "Translate this sentence from English to French. I love programming."
-      ),
-      new SystemMessage(
-        "You are a helpful assistant that translates English to French."
-      ),
-      new HumanMessage(
-        "Translate this sentence from English to French. I love artificial intelligence."
-      ),
+    new SystemMessage(
+      "You are a helpful assistant that translates English to French."
+    ),
+    new HumanMessage(
+      "Translate this sentence from English to French. I love programming."
+    ),
+    new SystemMessage(
+      "You are a helpful assistant that translates English to French."
+    ),
+    new HumanMessage(
+      "Translate this sentence from English to French. I love artificial intelligence."
+    ),
   ]);
 
   console.log(responseA);
diff --git a/examples/src/experimental/babyagi/weather_with_tools.ts b/examples/src/experimental/babyagi/weather_with_tools.ts
index 4badc7efc017..152b27e526f1 100644
--- a/examples/src/experimental/babyagi/weather_with_tools.ts
+++ b/examples/src/experimental/babyagi/weather_with_tools.ts
@@ -52,7 +52,9 @@ const babyAGI = BabyAGI.fromLLM({
   maxIterations: 10,
 });
 
-await babyAGI.invoke({ objective: "Write a short weather report for SF today" });
+await babyAGI.invoke({
+  objective: "Write a short weather report for SF today",
+});
 
 /*
 *****TASK LIST*****
diff --git a/examples/src/indexes/vector_stores/vectara.ts b/examples/src/indexes/vector_stores/vectara.ts
index 1b2af3df17d3..a865c7f75ecc 100644
--- a/examples/src/indexes/vector_stores/vectara.ts
+++ b/examples/src/indexes/vector_stores/vectara.ts
@@ -55,9 +55,7 @@ console.log(JSON.stringify(resultsWithScore, null, 2));
 */
 
 const retriever = new VectaraSummaryRetriever({ vectara: store, topK: 3 });
-const documents = await retriever.invoke(
-  "What were the women talking about?"
-);
+const documents = await retriever.invoke("What were the women talking about?");
 console.log(JSON.stringify(documents, null, 2));
 
 /*
diff --git a/examples/src/models/chat/chat.ts b/examples/src/models/chat/chat.ts
index 516a2c092fbb..934499c69e01 100644
--- a/examples/src/models/chat/chat.ts
+++ b/examples/src/models/chat/chat.ts
@@ -26,18 +26,18 @@ export const run = async () => {
 
   // Similar to LLMs, you can also use `generate` to generate chat completions for multiple sets of messages.
   const responseC = await chat.invoke([
-      new SystemMessage(
-        "You are a helpful assistant that translates English to French."
-      ),
-      new HumanMessage(
-        "Translate this sentence from English to French. I love programming."
-      ),
-      new SystemMessage(
-        "You are a helpful assistant that translates English to French."
-      ),
-      new HumanMessage(
-        "Translate this sentence from English to French. I love artificial intelligence."
-      ),
+    new SystemMessage(
+      "You are a helpful assistant that translates English to French."
+    ),
+    new HumanMessage(
+      "Translate this sentence from English to French. I love programming."
+    ),
+    new SystemMessage(
+      "You are a helpful assistant that translates English to French."
+    ),
+    new HumanMessage(
+      "Translate this sentence from English to French. I love artificial intelligence."
+    ),
   ]);
   console.log(responseC);
   /*
diff --git a/examples/src/retrievers/hyde.ts b/examples/src/retrievers/hyde.ts
index b82a26eaccd2..5c9066a30cf8 100644
--- a/examples/src/retrievers/hyde.ts
+++ b/examples/src/retrievers/hyde.ts
@@ -21,9 +21,7 @@ await vectorStore.addDocuments(
   ].map((pageContent) => new Document({ pageContent }))
 );
 
-const results = await retriever.invoke(
-  "What is my favourite food?"
-);
+const results = await retriever.invoke("What is my favourite food?");
 console.log(results);
 
 /*
diff --git a/examples/src/retrievers/parent_document_retriever_chunk_header.ts b/examples/src/retrievers/parent_document_retriever_chunk_header.ts
index 0def11cf67e9..9a2032ecd086 100644
--- a/examples/src/retrievers/parent_document_retriever_chunk_header.ts
+++ b/examples/src/retrievers/parent_document_retriever_chunk_header.ts
@@ -48,9 +48,7 @@ await retriever.addDocuments(pamDocs, {
 
 // This will search child documents in vector store with the help of chunk header,
 // returning the unmodified parent documents
-const retrievedDocs = await retriever.invoke(
-  "What is Pam's favorite color?"
-);
+const retrievedDocs = await retriever.invoke("What is Pam's favorite color?");
 
 // Pam's favorite color is returned first!
 console.log(JSON.stringify(retrievedDocs, null, 2));
diff --git a/examples/src/retrievers/similarity_score_threshold.ts b/examples/src/retrievers/similarity_score_threshold.ts
index 3e39435f1f98..357cdaeedc2b 100644
--- a/examples/src/retrievers/similarity_score_threshold.ts
+++ b/examples/src/retrievers/similarity_score_threshold.ts
@@ -22,9 +22,7 @@ const retriever = ScoreThresholdRetriever.fromVectorStore(vectorStore, {
   kIncrement: 2, // How much to increase K by each time. It'll fetch N results, then N + kIncrement, then N + kIncrement * 2, etc.
 });
 
-const result = await retriever.invoke(
-  "What are buildings made out of?"
-);
+const result = await retriever.invoke("What are buildings made out of?");
 
 console.log(result);
 
diff --git a/examples/src/retrievers/time-weighted-retriever.ts b/examples/src/retrievers/time-weighted-retriever.ts
index eaf5a32810ca..e3d745bfd3f4 100644
--- a/examples/src/retrievers/time-weighted-retriever.ts
+++ b/examples/src/retrievers/time-weighted-retriever.ts
@@ -22,9 +22,7 @@ const documents = [
 // so that the correct access history metadata is populated
 await retriever.addDocuments(documents);
 
-const results1 = await retriever.invoke(
-  "What is my favourite food?"
-);
+const results1 = await retriever.invoke("What is my favourite food?");
 
 console.log(results1);
 
@@ -34,9 +32,7 @@ console.log(results1);
 ]
 */
 
-const results2 = await retriever.invoke(
-  "What is my favourite food?"
-);
+const results2 = await retriever.invoke("What is my favourite food?");
 
 console.log(results2);
 