diff --git a/libs/langchain-google-gauth/src/tests/chat_models.int.test.ts b/libs/langchain-google-gauth/src/tests/chat_models.int.test.ts
deleted file mode 100644
index 83146ac72e00..000000000000
--- a/libs/langchain-google-gauth/src/tests/chat_models.int.test.ts
+++ /dev/null
@@ -1,226 +0,0 @@
-import { expect, test } from "@jest/globals";
-import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
-import { ChatPromptValue } from "@langchain/core/prompt_values";
-import {
-  AIMessage,
-  AIMessageChunk,
-  BaseMessage,
-  BaseMessageChunk,
-  BaseMessageLike,
-  HumanMessage,
-  // MessageContentComplex,
-  SystemMessage,
-  ToolMessage,
-} from "@langchain/core/messages";
-import { GeminiTool } from "@langchain/google-common";
-import { ChatGoogle } from "../chat_models.js";
-import { GoogleLLM } from "../llms.js";
-
-describe("GAuth Chat", () => {
-  test("platform", async () => {
-    const model = new GoogleLLM();
-    expect(model.platform).toEqual("gcp");
-  });
-
-  test("invoke", async () => {
-    const model = new ChatGoogle();
-    try {
-      const res = await model.invoke("What is 1 + 1?");
-      expect(res).toBeDefined();
-      expect(res._getType()).toEqual("ai");
-
-      const aiMessage = res as AIMessageChunk;
-      expect(aiMessage.content).toBeDefined();
-
-      expect(typeof aiMessage.content).toBe("string");
-      const text = aiMessage.content as string;
-      expect(text).toMatch(/(1 + 1 (equals|is|=) )?2.? ?/);
-
-      /*
-      expect(aiMessage.content.length).toBeGreaterThan(0);
-      expect(aiMessage.content[0]).toBeDefined();
-      const content = aiMessage.content[0] as MessageContentComplex;
-      expect(content).toHaveProperty("type");
-      expect(content.type).toEqual("text");
-
-      const textContent = content as MessageContentText;
-      expect(textContent.text).toBeDefined();
-      expect(textContent.text).toEqual("2");
-      */
-    } catch (e) {
-      console.error(e);
-      throw e;
-    }
-  });
-
-  test("generate", async () => {
-    const model = new ChatGoogle();
-    try {
-      const messages: BaseMessage[] = [
-        new SystemMessage(
-          "You will reply to all requests to flip a coin with either H, indicating heads, or T, indicating tails."
-        ),
-        new HumanMessage("Flip it"),
-        new AIMessage("T"),
-        new HumanMessage("Flip the coin again"),
-      ];
-      const res = await model.predictMessages(messages);
-      expect(res).toBeDefined();
-      expect(res._getType()).toEqual("ai");
-
-      const aiMessage = res as AIMessageChunk;
-      expect(aiMessage.content).toBeDefined();
-      expect(["H", "T"]).toContainEqual(aiMessage.content);
-    } catch (e) {
-      console.error(e);
-      throw e;
-    }
-  });
-
-  test("stream", async () => {
-    const model = new ChatGoogle();
-    try {
-      const input: BaseLanguageModelInput = new ChatPromptValue([
-        new SystemMessage(
-          "You will reply to all requests to flip a coin with either H, indicating heads, or T, indicating tails."
-        ),
-        new HumanMessage("Flip it"),
-        new AIMessage("T"),
-        new HumanMessage("Flip the coin again"),
-      ]);
-      const res = await model.stream(input);
-      const resArray: BaseMessageChunk[] = [];
-      for await (const chunk of res) {
-        resArray.push(chunk);
-      }
-      expect(resArray).toBeDefined();
-      expect(resArray.length).toBeGreaterThanOrEqual(1);
-
-      const lastChunk = resArray[resArray.length - 1];
-      expect(lastChunk).toBeDefined();
-      expect(lastChunk._getType()).toEqual("ai");
-      const aiChunk = lastChunk as AIMessageChunk;
-      console.log(aiChunk);
-
-      console.log(JSON.stringify(resArray, null, 2));
-    } catch (e) {
-      console.error(e);
-      throw e;
-    }
-  });
-
-  test("function", async () => {
-    const tools: GeminiTool[] = [
-      {
-        functionDeclarations: [
-          {
-            name: "test",
-            description:
-              "Run a test with a specific name and get if it passed or failed",
-            parameters: {
-              type: "object",
-              properties: {
-                testName: {
-                  type: "string",
-                  description: "The name of the test that should be run.",
-                },
-              },
-              required: ["testName"],
-            },
-          },
-        ],
-      },
-    ];
-    const model = new ChatGoogle().bind({ tools });
-    const result = await model.invoke("Run a test on the cobalt project");
-    expect(result).toHaveProperty("content");
-    expect(result.content).toBe("");
-    const args = result?.lc_kwargs?.additional_kwargs;
-    expect(args).toBeDefined();
-    expect(args).toHaveProperty("tool_calls");
-    expect(Array.isArray(args.tool_calls)).toBeTruthy();
-    expect(args.tool_calls).toHaveLength(1);
-    const call = args.tool_calls[0];
-    expect(call).toHaveProperty("type");
-    expect(call.type).toBe("function");
-    expect(call).toHaveProperty("function");
-    const func = call.function;
-    expect(func).toBeDefined();
-    expect(func).toHaveProperty("name");
-    expect(func.name).toBe("test");
-    expect(func).toHaveProperty("arguments");
-    expect(typeof func.arguments).toBe("string");
-    expect(func.arguments.replaceAll("\n", "")).toBe('{"testName":"cobalt"}');
-  });
-
-  test("function reply", async () => {
-    const tools: GeminiTool[] = [
-      {
-        functionDeclarations: [
-          {
-            name: "test",
-            description:
-              "Run a test with a specific name and get if it passed or failed",
-            parameters: {
-              type: "object",
-              properties: {
-                testName: {
-                  type: "string",
-                  description: "The name of the test that should be run.",
-                },
-              },
-              required: ["testName"],
-            },
-          },
-        ],
-      },
-    ];
-    const model = new ChatGoogle().bind({ tools });
-    const toolResult = {
-      testPassed: true,
-    };
-    const messages: BaseMessageLike[] = [
-      new HumanMessage("Run a test on the cobalt project."),
-      new AIMessage("", {
-        tool_calls: [
-          {
-            id: "test",
-            type: "function",
-            function: {
-              name: "test",
-              arguments: '{"testName":"cobalt"}',
-            },
-          },
-        ],
-      }),
-      new ToolMessage(JSON.stringify(toolResult), "test"),
-    ];
-    const res = await model.stream(messages);
-    const resArray: BaseMessageChunk[] = [];
-    for await (const chunk of res) {
-      resArray.push(chunk);
-    }
-    console.log(JSON.stringify(resArray, null, 2));
-  });
-
-  test("withStructuredOutput", async () => {
-    const tool = {
-      name: "get_weather",
-      description:
-        "Get the weather of a specific location and return the temperature in Celsius.",
-      parameters: {
-        type: "object",
-        properties: {
-          location: {
-            type: "string",
-            description: "The name of city to get the weather for.",
-          },
-        },
-        required: ["location"],
-      },
-    };
-    const model = new ChatGoogle().withStructuredOutput(tool);
-    const result = await model.invoke("What is the weather in Paris?");
-    expect(result).toHaveProperty("location");
-  });
-});
diff --git a/libs/langchain-google-gauth/src/tests/llms.int.test.ts b/libs/langchain-google-gauth/src/tests/llms.int.test.ts
deleted file mode 100644
index 61d2b77e11be..000000000000
--- a/libs/langchain-google-gauth/src/tests/llms.int.test.ts
+++ /dev/null
@@ -1,236 +0,0 @@
-import { test } from "@jest/globals";
-import {
-  AIMessage,
-  BaseMessage,
-  HumanMessageChunk,
-  MessageContentComplex,
-} from "@langchain/core/messages";
-import { ChatPromptValue } from "@langchain/core/prompt_values";
-import { GoogleLLM } from "../llms.js";
-
-const imgData = {
-  blueSquare:
-    "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAIAAAACUFjqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH6AIbFwQSRaexCAAAAB1pVFh0Q29tbWVudAAAAAAAQ3JlYXRlZCB3aXRoIEdJTVBkLmUHAAAAJklEQVQY02P8//8/A27AxIAXsEAor31f0CS2OfEQ1j2Q0owU+RsAGNUJD2/04PgAAAAASUVORK5CYII=",
-};
-
-describe("GAuth LLM", () => {
-  test("platform", async () => {
-    const model = new GoogleLLM();
-    expect(model.platform).toEqual("gcp");
-  });
-
-  test("call", async () => {
-    const model = new GoogleLLM();
-    try {
-      const res = await model.invoke("1 + 1 = ");
-      if (res.length === 1) {
-        expect(res).toBe("2");
-      } else {
-        expect(res.length).toBeGreaterThan(0);
-        console.log("call result:", res);
-      }
-    } catch (xx) {
-      console.error(xx);
-      throw xx;
-    }
-  });
-
-  test("generate", async () => {
-    const model = new GoogleLLM();
-    const res = await model.generate(["Print hello world."]);
-    expect(res).toHaveProperty("generations");
-    expect(res.generations.length).toBeGreaterThan(0);
-    expect(res.generations[0].length).toBeGreaterThan(0);
-    expect(res.generations[0][0]).toHaveProperty("text");
-    console.log("generate result:", JSON.stringify(res, null, 2));
-  });
-
-  test("stream", async () => {
-    const model = new GoogleLLM();
-    const stream = await model.stream(
-      "What is the answer to live, the universe, and everything? Be verbose."
-    );
-    const chunks = [];
-    for await (const chunk of stream) {
-      chunks.push(chunk);
-    }
-    expect(chunks.length).toBeGreaterThan(1);
-  });
-
-  test("predictMessage image", async () => {
-    const model = new GoogleLLM({
-      modelName: "gemini-pro-vision",
-    });
-    const message: MessageContentComplex[] = [
-      {
-        type: "text",
-        text: "What is in this image?",
-      },
-      {
-        type: "image_url",
-        image_url: `data:image/png;base64,${imgData.blueSquare}`,
-      },
-    ];
-
-    const messages: BaseMessage[] = [
-      new HumanMessageChunk({ content: message }),
-    ];
-    const res = await model.predictMessages(messages);
-    expect(res).toBeInstanceOf(AIMessage);
-    expect(Array.isArray(res.content)).toEqual(true);
-    expect(res.content[0]).toHaveProperty("text");
-    console.log("res", res);
-  });
-
-  test("invoke image", async () => {
-    const model = new GoogleLLM({
-      modelName: "gemini-pro-vision",
-    });
-    const message: MessageContentComplex[] = [
-      {
-        type: "text",
-        text: "What is in this image?",
-      },
-      {
-        type: "image_url",
-        image_url: `data:image/png;base64,${imgData.blueSquare}`,
-      },
-    ];
-
-    const messages: BaseMessage[] = [
-      new HumanMessageChunk({ content: message }),
-    ];
-    const input = new ChatPromptValue(messages);
-    const res = await model.invoke(input);
-    expect(res).toBeDefined();
-    expect(res.length).toBeGreaterThan(0);
-    console.log("res", res);
-  });
-});
-
-describe.skip("GAuth LLM gai", () => {
-  test("platform", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-    });
-    expect(model.platform).toEqual("gai");
-  });
-
-  /*
-   * This test currently fails in AI Studio due to zealous safety systems
-   */
-  test.skip("call", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-    });
-    try {
-      const res = await model.invoke("1 + 1 = ");
-      if (res.length === 1) {
-        expect(res).toBe("2");
-      } else {
-        console.log("call result:", res);
-        expect(res.length).toBeGreaterThan(0);
-      }
-    } catch (xx) {
-      console.error(xx);
-      throw xx;
-    }
-  });
-
-  test("call", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-    });
-    try {
-      const res = await model.invoke("If the time is 1:00, what time is it?");
-      expect(res.length).toBeGreaterThan(0);
-      expect(res.substring(0, 4)).toEqual("1:00");
-    } catch (xx) {
-      console.error(xx);
-      throw xx;
-    }
-  });
-
-  test("generate", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-    });
-    const res = await model.generate(["Print hello world."]);
-    expect(res).toHaveProperty("generations");
-    expect(res.generations.length).toBeGreaterThan(0);
-    expect(res.generations[0].length).toBeGreaterThan(0);
-    expect(res.generations[0][0]).toHaveProperty("text");
-    console.log("generate result:", JSON.stringify(res, null, 2));
-  });
-
-  test("stream", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-    });
-    const stream = await model.stream(
-      "What is the answer to live, the universe, and everything? Be verbose."
-    );
-    const chunks = [];
-    try {
-      for await (const chunk of stream) {
-        chunks.push(chunk);
-      }
-      // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    } catch (xx: any) {
-      expect(xx?.message).toEqual("Finish reason: RECITATION");
-    }
-    expect(chunks.length).toBeGreaterThan(1);
-  });
-
-  test("predictMessage image", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-      modelName: "gemini-pro-vision",
-    });
-    const message: MessageContentComplex[] = [
-      {
-        type: "text",
-        text: "What is in this image?",
-      },
-      {
-        type: "image_url",
-        image_url: `data:image/png;base64,${imgData.blueSquare}`,
-      },
-    ];
-
-    const messages: BaseMessage[] = [
-      new HumanMessageChunk({ content: message }),
-    ];
-    const res = await model.predictMessages(messages);
-    expect(res).toBeInstanceOf(AIMessage);
-    expect(Array.isArray(res.content)).toEqual(true);
-    expect(res.content[0]).toHaveProperty("text");
-    console.log("res", res);
-  });
-
-  test("invoke image", async () => {
-    const model = new GoogleLLM({
-      platformType: "gai",
-      modelName: "gemini-pro-vision",
-    });
-    const message: MessageContentComplex[] = [
-      {
-        type: "text",
-        text: "What is in this image?",
-      },
-      {
-        type: "image_url",
-        image_url: `data:image/png;base64,${imgData.blueSquare}`,
-      },
-    ];
-
-    const messages: BaseMessage[] = [
-      new HumanMessageChunk({ content: message }),
-    ];
-    const input = new ChatPromptValue(messages);
-    const res = await model.invoke(input);
-    expect(res).toBeDefined();
-    expect(res.length).toBeGreaterThan(0);
-    console.log("res", res);
-  });
-});
diff --git a/libs/langchain-google-vertexai-web/src/tests/chat_models.int.test.ts b/libs/langchain-google-vertexai-web/src/tests/chat_models.int.test.ts
index e737d61d0bdf..38db387d36dc 100644
--- a/libs/langchain-google-vertexai-web/src/tests/chat_models.int.test.ts
+++ b/libs/langchain-google-vertexai-web/src/tests/chat_models.int.test.ts
@@ -166,6 +166,27 @@ describe("Google APIKey Chat", () => {
     console.log(res2);
     expect(res2.content).toContain("24");
   });
+
+  test("withStructuredOutput", async () => {
+    const tool = {
+      name: "get_weather",
+      description:
+        "Get the weather of a specific location and return the temperature in Celsius.",
+      parameters: {
+        type: "object",
+        properties: {
+          location: {
+            type: "string",
+            description: "The name of city to get the weather for.",
+          },
+        },
+        required: ["location"],
+      },
+    };
+    const model = new ChatVertexAI().withStructuredOutput(tool);
+    const result = await model.invoke("What is the weather in Paris?");
+    expect(result).toHaveProperty("location");
+  });
 });
 
 describe("Google Webauth Chat", () => {
diff --git a/libs/langchain-google-vertexai-web/src/tests/llms.int.test.ts b/libs/langchain-google-vertexai-web/src/tests/llms.int.test.ts
index 2b8155710edf..cf92f36d2bae 100644
--- a/libs/langchain-google-vertexai-web/src/tests/llms.int.test.ts
+++ b/libs/langchain-google-vertexai-web/src/tests/llms.int.test.ts
@@ -108,86 +108,6 @@ describe("Google APIKey LLM", () => {
   });
 });
 
-describe("Google WebAuth LLM", () => {
-  test("platform", async () => {
-    const model = new VertexAI();
-    expect(model.platform).toEqual("gcp");
-  });
-
-  test("call", async () => {
-    const model = new VertexAI();
-    const res = await model.invoke("1 + 1 = ");
-    if (res.length === 1) {
-      expect(res).toBe("2");
-    } else {
-      expect(res.length).toBeGreaterThan(0);
-      console.log("call result:", res);
-    }
-  });
-
-  test("stream", async () => {
-    const model = new VertexAI();
-    const stream = await model.stream(
-      "What is the answer to live, the universe, and everything? Be verbose."
-    );
-    const chunks = [];
-    for await (const chunk of stream) {
-      chunks.push(chunk);
-    }
-    expect(chunks.length).toBeGreaterThan(1);
-  });
-
-  test("predictMessage image", async () => {
-    const model = new VertexAI({
-      modelName: "gemini-pro-vision",
-    });
-    const message: MessageContentComplex[] = [
-      {
-        type: "text",
-        text: "What is in this image?",
-      },
-      {
-        type: "image_url",
-        image_url: `data:image/png;base64,${imgData.blueSquare}`,
-      },
-    ];
-
-    const messages: BaseMessage[] = [
-      new HumanMessageChunk({ content: message }),
-    ];
-    const res = await model.predictMessages(messages);
-    expect(res).toBeInstanceOf(AIMessage);
-    expect(Array.isArray(res.content)).toEqual(true);
-    expect(res.content[0]).toHaveProperty("text");
-    console.log("res", res);
-  });
-
-  test("invoke image", async () => {
-    const model = new VertexAI({
-      modelName: "gemini-pro-vision",
-    });
-    const message: MessageContentComplex[] = [
-      {
-        type: "text",
-        text: "What is in this image?",
-      },
-      {
-        type: "image_url",
-        image_url: `data:image/png;base64,${imgData.blueSquare}`,
-      },
-    ];
-
-    const messages: BaseMessage[] = [
-      new HumanMessageChunk({ content: message }),
-    ];
-    const input = new ChatPromptValue(messages);
-    const res = await model.invoke(input);
-    expect(res).toBeDefined();
-    expect(res.length).toBeGreaterThan(0);
-    console.log("res", res);
-  });
-});
-
 describe("Google WebAuth gai LLM", () => {
   test("platform", async () => {
     const model = new VertexAI({
       platformType: "gai",
diff --git a/libs/langchain-google-vertexai/src/tests/chat_models.int.test.ts b/libs/langchain-google-vertexai/src/tests/chat_models.int.test.ts
index 0f43cadc95aa..4ba3eccdf073 100644
--- a/libs/langchain-google-vertexai/src/tests/chat_models.int.test.ts
+++ b/libs/langchain-google-vertexai/src/tests/chat_models.int.test.ts
@@ -1,6 +1,4 @@
 import { test } from "@jest/globals";
-// eslint-disable-next-line import/no-extraneous-dependencies
-import { z } from "zod";
 import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
 import { ChatPromptValue } from "@langchain/core/prompt_values";
 import {
@@ -8,21 +6,15 @@ import {
   AIMessageChunk,
   BaseMessage,
   BaseMessageChunk,
+  BaseMessageLike,
   HumanMessage,
-  // MessageContentComplex,
-  // MessageContentText,
   SystemMessage,
+  ToolMessage,
 } from "@langchain/core/messages";
-import { ConsoleCallbackHandler } from "@langchain/core/tracers/console";
 import { ChatVertexAI } from "../chat_models.js";
-import { VertexAI } from "../llms.js";
+import { GeminiTool } from "../types.js";
 
 describe("GAuth Chat", () => {
-  test("platform", async () => {
-    const model = new VertexAI();
-    expect(model.platform).toEqual("gcp");
-  });
-
   test("invoke", async () => {
     const model = new ChatVertexAI();
     try {
@@ -126,29 +118,118 @@ describe("GAuth Chat", () => {
     }
   });
 
-  test("structuredOutput", async () => {
-    const handler = new ConsoleCallbackHandler();
-
-    const calculatorSchema = z.object({
-      operation: z
-        .enum(["add", "subtract", "multiply", "divide"])
-        .describe("The type of operation to execute"),
-      number1: z.number().describe("The first number to operate on."),
-      number2: z.number().describe("The second number to operate on."),
-    });
-
-    const model = new ChatVertexAI({
-      temperature: 0.7,
-      model: "gemini-1.0-pro",
-      callbacks: [handler],
-    }).withStructuredOutput(calculatorSchema);
-
-    const response = await model.invoke("What is 1628253239 times 81623836?");
-    expect(response).toHaveProperty("operation");
-    expect(response.operation).toEqual("multiply");
-    expect(response).toHaveProperty("number1");
-    expect(response.number1).toEqual(1628253239);
-    expect(response).toHaveProperty("number2");
-    expect(response.number2).toEqual(81623836);
+  test("function", async () => {
+    const tools: GeminiTool[] = [
+      {
+        functionDeclarations: [
+          {
+            name: "test",
+            description:
+              "Run a test with a specific name and get if it passed or failed",
+            parameters: {
+              type: "object",
+              properties: {
+                testName: {
+                  type: "string",
+                  description: "The name of the test that should be run.",
+                },
+              },
+              required: ["testName"],
+            },
+          },
+        ],
+      },
+    ];
+    const model = new ChatVertexAI().bind({ tools });
+    const result = await model.invoke("Run a test on the cobalt project");
+    expect(result).toHaveProperty("content");
+    expect(result.content).toBe("");
+    const args = result?.lc_kwargs?.additional_kwargs;
+    expect(args).toBeDefined();
+    expect(args).toHaveProperty("tool_calls");
+    expect(Array.isArray(args.tool_calls)).toBeTruthy();
+    expect(args.tool_calls).toHaveLength(1);
+    const call = args.tool_calls[0];
+    expect(call).toHaveProperty("type");
+    expect(call.type).toBe("function");
+    expect(call).toHaveProperty("function");
+    const func = call.function;
+    expect(func).toBeDefined();
+    expect(func).toHaveProperty("name");
+    expect(func.name).toBe("test");
+    expect(func).toHaveProperty("arguments");
+    expect(typeof func.arguments).toBe("string");
+    expect(func.arguments.replaceAll("\n", "")).toBe('{"testName":"cobalt"}');
+  });
+
+  test("function reply", async () => {
+    const tools: GeminiTool[] = [
+      {
+        functionDeclarations: [
+          {
+            name: "test",
+            description:
+              "Run a test with a specific name and get if it passed or failed",
+            parameters: {
+              type: "object",
+              properties: {
+                testName: {
+                  type: "string",
+                  description: "The name of the test that should be run.",
+                },
+              },
+              required: ["testName"],
+            },
+          },
+        ],
+      },
+    ];
+    const model = new ChatVertexAI().bind({ tools });
+    const toolResult = {
+      testPassed: true,
+    };
+    const messages: BaseMessageLike[] = [
+      new HumanMessage("Run a test on the cobalt project."),
+      new AIMessage("", {
+        tool_calls: [
+          {
+            id: "test",
+            type: "function",
+            function: {
+              name: "test",
+              arguments: '{"testName":"cobalt"}',
+            },
+          },
+        ],
+      }),
+      new ToolMessage(JSON.stringify(toolResult), "test"),
+    ];
+    const res = await model.stream(messages);
+    const resArray: BaseMessageChunk[] = [];
+    for await (const chunk of res) {
+      resArray.push(chunk);
+    }
+    console.log(JSON.stringify(resArray, null, 2));
+  });
+
+  test("withStructuredOutput", async () => {
+    const tool = {
+      name: "get_weather",
+      description:
+        "Get the weather of a specific location and return the temperature in Celsius.",
+      parameters: {
+        type: "object",
+        properties: {
+          location: {
+            type: "string",
+            description: "The name of city to get the weather for.",
+          },
+        },
+        required: ["location"],
+      },
+    };
+    const model = new ChatVertexAI().withStructuredOutput(tool);
+    const result = await model.invoke("What is the weather in Paris?");
+    expect(result).toHaveProperty("location");
   });
 });