diff --git a/langchain/src/chat_models/base.ts b/langchain/src/chat_models/base.ts
index 04cbf5d94176..2f2bc76cfbc9 100644
--- a/langchain/src/chat_models/base.ts
+++ b/langchain/src/chat_models/base.ts
@@ -102,7 +102,9 @@ async function _initChatModelHelper(
       return new ChatGoogleGenerativeAI({ model, ...kwargs });
     }
     case "fireworks": {
-      throw new Error("Dynamic imports from @langchain/community not supported.");
+      throw new Error(
+        "Dynamic imports from @langchain/community not supported."
+      );
       // _checkPackage("@langchain/community/chat_models/fireworks");
       // const { ChatFireworks } = await import(
       //   "@langchain/community/chat_models/fireworks"
@@ -115,7 +117,9 @@ async function _initChatModelHelper(
       return new ChatOllama({ model, ...kwargs });
     }
     case "together": {
-      throw new Error("Dynamic imports from @langchain/community not supported.");
+      throw new Error(
+        "Dynamic imports from @langchain/community not supported."
+      );
       // _checkPackage("@langchain/community/chat_models/togetherai");
       // const { ChatTogetherAI } = await import(
       //   "@langchain/community/chat_models/togetherai"
diff --git a/libs/langchain-aws/src/tests/embeddings.int.test.ts b/libs/langchain-aws/src/tests/embeddings.int.test.ts
index 3dd370693c44..d3649bb47541 100644
--- a/libs/langchain-aws/src/tests/embeddings.int.test.ts
+++ b/libs/langchain-aws/src/tests/embeddings.int.test.ts
@@ -70,15 +70,12 @@ test.skip("Test end to end with MemoryVectorStore", async () => {
   //   })
   // );
   // expect(vectorStore.memoryVectors).toHaveLength(3);
-
   // const resultOne = await vectorStore.similaritySearch("hello world", 1);
   // const resultOneMetadatas = resultOne.map(({ metadata }) => metadata);
   // expect(resultOneMetadatas).toEqual([{ id: 2 }]);
-
   // const resultTwo = await vectorStore.similaritySearch("hello world", 2);
   // const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
   // expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }]);
-
   // const resultThree = await vectorStore.similaritySearch("hello world", 3);
   // const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
   // expect(resultThreeMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
diff --git a/libs/langchain-groq/src/tests/agent.int.test.ts b/libs/langchain-groq/src/tests/agent.int.test.ts
index cc88211e89e0..0e40a382d36c 100644
--- a/libs/langchain-groq/src/tests/agent.int.test.ts
+++ b/libs/langchain-groq/src/tests/agent.int.test.ts
@@ -18,7 +18,6 @@ test.skip("Model is compatible with OpenAI tools agent and Agent Executor", asyn
   //   ["human", "{input}"],
   //   ["placeholder", "{agent_scratchpad}"],
   // ]);
-
   // const currentWeatherTool = new DynamicStructuredTool({
   //   name: "get_current_weather",
   //   description: "Get the current weather in a given location",
@@ -29,22 +28,18 @@ test.skip("Model is compatible with OpenAI tools agent and Agent Executor", asyn
   //   }),
   //   func: async () => Promise.resolve("28 °C"),
   // });
-
   // const agent = await createOpenAIToolsAgent({
   //   llm,
   //   tools: [currentWeatherTool],
   //   prompt,
   // });
-
   // const agentExecutor = new AgentExecutor({
   //   agent,
   //   tools: [currentWeatherTool],
   // });
-
   // const input = "What's the weather like in Paris?";
   // const { output } = await agentExecutor.invoke({ input });
-
   // console.log(output);
   // expect(output).toBeDefined();
   // expect(output).toContain("The current temperature in Paris is 28 °C");
-});
\ No newline at end of file
+});
diff --git a/libs/langchain-mistralai/src/tests/agent.int.test.ts b/libs/langchain-mistralai/src/tests/agent.int.test.ts
index 3383d9efc35c..6fcbc49be579 100644
--- a/libs/langchain-mistralai/src/tests/agent.int.test.ts
+++ b/libs/langchain-mistralai/src/tests/agent.int.test.ts
@@ -50,19 +50,16 @@ test("Model is compatible with OpenAI tools agent and Agent Executor", async ()
   //   temperature: 0,
   //   model: "mistral-large-latest",
   // });
-
   // const systemMessage = SystemMessagePromptTemplate.fromTemplate(
   //   "You are an agent capable of retrieving current weather information."
   // );
   // const humanMessage = HumanMessagePromptTemplate.fromTemplate("{input}");
   // const agentScratchpad = new MessagesPlaceholder("agent_scratchpad");
-
   // const prompt = ChatPromptTemplate.fromMessages([
   //   systemMessage,
   //   humanMessage,
   //   agentScratchpad,
   // ]);
-
   // const currentWeatherTool = new DynamicStructuredTool({
   //   name: "get_current_weather",
   //   description: "Get the current weather in a given location",
@@ -73,22 +70,18 @@ test("Model is compatible with OpenAI tools agent and Agent Executor", async ()
   //   }),
   //   func: async () => Promise.resolve("28 °C"),
   // });
-
   // const agent = await createOpenAIToolsAgent({
   //   llm,
   //   tools: [currentWeatherTool],
   //   prompt,
   // });
-
   // const agentExecutor = new AgentExecutor({
   //   agent,
   //   tools: [currentWeatherTool],
   // });
-
   // const input = "What's the weather like in Paris?";
   // const { output } = await agentExecutor.invoke({ input });
-
   // console.log(output);
   // expect(output).toBeDefined();
   // expect(output).toContain("The current temperature in Paris is 28 °C");
-});
\ No newline at end of file
+});
diff --git a/libs/langchain-mistralai/src/tests/chat_models.int.test.ts b/libs/langchain-mistralai/src/tests/chat_models.int.test.ts
index 15649d842603..7280ceddacf4 100644
--- a/libs/langchain-mistralai/src/tests/chat_models.int.test.ts
+++ b/libs/langchain-mistralai/src/tests/chat_models.int.test.ts
@@ -1,7 +1,5 @@
 import { test } from "@jest/globals";
-import {
-  ChatPromptTemplate,
-} from "@langchain/core/prompts";
+import { ChatPromptTemplate } from "@langchain/core/prompts";
 import { StructuredTool } from "@langchain/core/tools";
 import { z } from "zod";
 import {