diff --git a/docs/core_docs/docs/integrations/chat/google_vertex_ai.mdx b/docs/core_docs/docs/integrations/chat/google_vertex_ai.mdx
index d1211f50f67f..c9606af89328 100644
--- a/docs/core_docs/docs/integrations/chat/google_vertex_ai.mdx
+++ b/docs/core_docs/docs/integrations/chat/google_vertex_ai.mdx
@@ -85,10 +85,10 @@ import ChatVertexAI from "@examples/models/chat/integration_googlevertexai.ts";
 
 <CodeBlock language="typescript">{ChatVertexAI}</CodeBlock>
 
 :::tip
-See the LangSmith trace for the example above [here](https://smith.langchain.com/public/9fb579d8-4987-4302-beca-29a684ae2f4c/r).
+See the LangSmith trace for the example above [here](https://smith.langchain.com/public/9403290d-1ca6-41e5-819c-f3ec233194c5/r).
 :::
 
-## Multimodality
+## Multi-modal
 
 The Gemini API can process multi modal inputs. The example below demonstrates how to do this:
@@ -96,6 +96,10 @@ import MultiModalVertexAI from "@examples/models/chat/integration_googlevertexai
 
 <CodeBlock language="typescript">{MultiModalVertexAI}</CodeBlock>
 
+:::tip
+See the LangSmith trace for the example above [here](https://smith.langchain.com/public/4cb2707d-bcf8-417e-8965-310b3045eb62/r).
+:::
+
 ### Streaming
 
 `ChatVertexAI` also supports streaming in multiple chunks for faster responses:
@@ -105,7 +109,7 @@ import ChatVertexAIStreaming from "@examples/models/chat/integration_googleverte
 <CodeBlock language="typescript">{ChatVertexAIStreaming}</CodeBlock>
 
 :::tip
-See the LangSmith trace for the example above [here](https://smith.langchain.com/public/ba4cb190-3f60-49aa-a6f8-7d31316d94cf/r).
+See the LangSmith trace for the example above [here](https://smith.langchain.com/public/011c26dc-b7db-4fad-b0f2-3653f41a7667/r).
 :::
 
 ### Tool calling
@@ -117,19 +121,19 @@ import ChatVertexAITool from "@examples/models/chat/integration_googlevertexai-t
 <CodeBlock language="typescript">{ChatVertexAITool}</CodeBlock>
 
 :::tip
-See the LangSmith trace for the example above [here](https://smith.langchain.com/public/49e1c32c-395a-45e2-afba-913aa3389137/r).
+See the LangSmith trace for the example above [here](https://smith.langchain.com/public/e6714fb3-ef24-447c-810d-7ff2c80c7db4/r).
 :::
 
 ### `withStructuredOutput`
 
 Alternatively, you can also use the `withStructuredOutput` method:
 
-import ChatVertexAIWSA from "@examples/models/chat/integration_googlevertexai-wsa.ts";
+import ChatVertexAIWSO from "@examples/models/chat/integration_googlevertexai-wso.ts";
 
-<CodeBlock language="typescript">{ChatVertexAIWSA}</CodeBlock>
+<CodeBlock language="typescript">{ChatVertexAIWSO}</CodeBlock>
 
 :::tip
-See the LangSmith trace for the example above [here](https://smith.langchain.com/public/41bbbddb-f357-4bfa-a111-def8294a4514/r).
+See the LangSmith trace for the example above [here](https://smith.langchain.com/public/d7b9860a-a761-4f76-ba57-195759eb38e7/r).
 :::
 
 ### VertexAI tools agent
diff --git a/examples/src/models/chat/integration_googlevertexai-multimodal.ts b/examples/src/models/chat/integration_googlevertexai-multimodal.ts
index e072ac980c88..dc0474daaa1e 100644
--- a/examples/src/models/chat/integration_googlevertexai-multimodal.ts
+++ b/examples/src/models/chat/integration_googlevertexai-multimodal.ts
@@ -1,15 +1,34 @@
+import { ChatPromptTemplate } from "@langchain/core/prompts";
 import { ChatVertexAI } from "@langchain/google-vertexai";
+import fs from "node:fs";
 
 const model = new ChatVertexAI({
-  model: "gemini-pro-vision",
+  model: "gemini-pro-vision",
   temperature: 0.7,
 });
 
-const imageURL = "imageURL"; // Replace with your image URL
-
-const response = await model.invoke([
-  { type: "text", text: "Describe what you see in this image." },
-  { type: "image_url", image_url: { url: imageURL } },
+const image = fs.readFileSync("./hotdog.jpg").toString("base64");
+const prompt = ChatPromptTemplate.fromMessages([
+  [
+    "human",
+    [
+      {
+        type: "text",
+        text: "Describe the following image.",
+      },
+      {
+        type: "image_url",
+        image_url: "data:image/png;base64,{image_base64}",
+      },
+    ],
+  ],
 ]);
 
-console.log(response.content); // Extract the text content from the response
\ No newline at end of file
+const response = await prompt.pipe(model).invoke({
+  image_base64: image,
+});
+
+console.log(response.content);
+/*
+This is an image of a hot dog. The hot dog is on a white background. The hot dog is a grilled sausage in a bun.
+*/
diff --git a/examples/src/models/chat/integration_googlevertexai-streaming.ts b/examples/src/models/chat/integration_googlevertexai-streaming.ts
index d8e5d094a75c..5f2c2d0d44d3 100644
--- a/examples/src/models/chat/integration_googlevertexai-streaming.ts
+++ b/examples/src/models/chat/integration_googlevertexai-streaming.ts
@@ -12,23 +12,14 @@ const stream = await model.stream([
 ]);
 
 for await (const chunk of stream) {
-  console.log(chunk);
+  console.log(chunk.content);
 }
 
 /*
-AIMessageChunk {
-  content: [{ type: 'text', text: 'Ahoy there, matey! Me favorite grub be fish and chips, with' }],
-  additional_kwargs: {},
-  response_metadata: { data: { candidates: [Array], promptFeedback: [Object] } }
-}
-AIMessageChunk {
-  content: [{ type: 'text', text: " a hearty pint o' grog to wash it down. What be yer fancy, landlubber?" }],
-  additional_kwargs: {},
-  response_metadata: { data: { candidates: [Array] } }
-}
-AIMessageChunk {
-  content: '',
-  additional_kwargs: {},
-  response_metadata: { finishReason: 'stop' }
-}
+A
+hoy, matey! Me favorite food be a hearty plate o' grub,
+ with a side o' scurvy dogs and a tankard o' grog
+. Argh!
+
+
 */
diff --git a/examples/src/models/chat/integration_googlevertexai-tools.ts b/examples/src/models/chat/integration_googlevertexai-tools.ts
index e36dd82c984e..633a104564cc 100644
--- a/examples/src/models/chat/integration_googlevertexai-tools.ts
+++ b/examples/src/models/chat/integration_googlevertexai-tools.ts
@@ -36,13 +36,13 @@ console.log(JSON.stringify(response.additional_kwargs, null, 2));
 {
   "tool_calls": [
     {
-      "id": "calculator",
+      "id": "a20075d3b0e34f7ca60cc135916e620d",
       "type": "function",
       "function": {
         "name": "calculator",
-        "arguments": "{\"number2\":81623836,\"number1\":1628253239,\"operation\":\"multiply\"}"
+        "arguments": "{\"number1\":1628253239,\"operation\":\"multiply\",\"number2\":81623836}"
       }
     }
-  ],
+  ]
 }
 */
diff --git a/examples/src/models/chat/integration_googlevertexai-wsa.ts b/examples/src/models/chat/integration_googlevertexai-wso.ts
similarity index 100%
rename from examples/src/models/chat/integration_googlevertexai-wsa.ts
rename to examples/src/models/chat/integration_googlevertexai-wso.ts
diff --git a/examples/src/models/chat/integration_googlevertexai.ts b/examples/src/models/chat/integration_googlevertexai.ts
index 3fa1f2c1caae..e0a3646f86f9 100644
--- a/examples/src/models/chat/integration_googlevertexai.ts
+++ b/examples/src/models/chat/integration_googlevertexai.ts
@@ -11,8 +11,11 @@ const response = await model.invoke("Why is the ocean blue?");
 console.log(response);
 /*
 AIMessageChunk {
-  content: [{ type: 'text', text: 'The ocean appears blue due to a phenomenon called Rayleigh scattering. This occurs when sunlight' }],
-  additional_kwargs: {},
-  response_metadata: {}
+  "content": "The ocean appears blue due to a combination of factors:\n\n**1. Rayleigh Scattering:**\n\n* This is the primary reason. Sunlight is made up of all colors of the rainbow. When sunlight enters the ocean, blue wavelengths are scattered more than other colors by water molecules. \n* This scattered blue light is what we see. Think of it like sunlight being scattered by the atmosphere, making the sky appear blue.\n\n**2. Absorption of Other Colors:**\n\n* Water absorbs red, orange, yellow, and green wavelengths of light more readily than blue. This means less of those colors reach our eyes.\n* The deeper the water, the more red light is absorbed, making the ocean appear even bluer.\n\n**3. Other Factors:**\n\n* **Depth:** The deeper the water, the bluer it appears.\n* **Turbidity:** The presence of particles like sediment or plankton can affect the color. A cloudy ocean might appear more greenish or brown.\n* **Time of Day:** The ocean can appear different colors depending on the angle of the sun.\n\n**In Summary:**\n\nThe ocean appears blue primarily due to Rayleigh scattering, where blue wavelengths of light are scattered more effectively by water molecules. This, combined with the absorption of other colors by water, results in the blue hue we perceive.\n",
+  "usage_metadata": {
+    "input_tokens": 6,
+    "output_tokens": 276,
+    "total_tokens": 282
+  }
 }
 */
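For reviewers who want to reproduce the `additional_kwargs` output shown in the `integration_googlevertexai-tools.ts` hunk above, here is a minimal sketch of a tool-calling setup. The tool definition and schema below are illustrative assumptions rather than the contents of the actual example file, and the sketch assumes an installed `@langchain/google-vertexai` version whose `ChatVertexAI` supports `bindTools`:

```typescript
import { ChatVertexAI } from "@langchain/google-vertexai";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { z } from "zod";

// Hypothetical calculator tool for illustration only.
const calculatorTool = new DynamicStructuredTool({
  name: "calculator",
  description: "A simple calculator tool.",
  schema: z.object({
    operation: z.enum(["add", "subtract", "multiply", "divide"]),
    number1: z.number(),
    number2: z.number(),
  }),
  func: async ({ operation, number1, number2 }) => {
    if (operation === "multiply") return String(number1 * number2);
    // ...handle the remaining operations as needed.
    return "Unsupported operation";
  },
});

const model = new ChatVertexAI({
  temperature: 0.7,
}).bindTools([calculatorTool]);

const response = await model.invoke("What is 1628253239 times 81623836?");

// Prints a tool_calls array shaped like the one in the diff; the "id"
// value is generated per request, so it will differ from the example.
console.log(JSON.stringify(response.additional_kwargs, null, 2));
```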
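The rename from `integration_googlevertexai-wsa.ts` to `integration_googlevertexai-wso.ts` is content-identical (100% similarity), so the example itself does not appear in this diff. For context, a `withStructuredOutput` example for this model generally follows the shape sketched below; the schema, prompt string, and default model are assumptions, not the file's actual contents:

```typescript
import { ChatVertexAI } from "@langchain/google-vertexai";
import { z } from "zod";

// Hypothetical schema for illustration only.
const calculatorSchema = z.object({
  operation: z
    .enum(["add", "subtract", "multiply", "divide"])
    .describe("The type of operation to execute"),
  number1: z.number().describe("The first number to operate on."),
  number2: z.number().describe("The second number to operate on."),
});

const model = new ChatVertexAI({
  temperature: 0.7,
});

// Wraps the model so its output is parsed against the schema.
const modelWithStructuredOutput = model.withStructuredOutput(calculatorSchema);

const response = await modelWithStructuredOutput.invoke(
  "What is 1628253239 times 81623836?"
);

// e.g. { operation: "multiply", number1: 1628253239, number2: 81623836 }
console.log(response);
```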