From 25e25b5a871bdc690d8fade401d4a246234b6aae Mon Sep 17 00:00:00 2001
From: bracesproul
Date: Wed, 24 Jul 2024 13:08:33 -0700
Subject: [PATCH] cr

---
 .../how_to/chat_models_universal_init.mdx | 21 +++++++++++++------
 langchain/src/chat_models/universal.ts    |  5 ++++-
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/docs/core_docs/docs/how_to/chat_models_universal_init.mdx b/docs/core_docs/docs/how_to/chat_models_universal_init.mdx
index 7fca0414d436..fcde843c20a4 100644
--- a/docs/core_docs/docs/how_to/chat_models_universal_init.mdx
+++ b/docs/core_docs/docs/how_to/chat_models_universal_init.mdx
@@ -5,19 +5,28 @@ import CodeBlock from "@theme/CodeBlock";
 
 Many LLM applications let end users specify what model provider and model they want the application to be powered by. This requires writing some logic to initialize different ChatModels based on some user configuration. The `initChatModel()` helper method makes it easy to initialize a number of different model integrations without having to worry about import paths and class names.
+Keep in mind this feature is only for chat models.
 
-:::caution
-This feature is only intended to be used in Node environments. Use in non Node environments or with bundlers is not guaranteed to work and not officially supported.
-:::
+:::info Prerequisites
 
-:::tip Supported models
-See the [initChatModel()](https://v02.api.js.langchain.com/functions/langchain_chat_models_configurable.initChatModel.html) API reference for a full list of supported integrations.
+This guide assumes familiarity with the following concepts:
+
+- [Chat models](/docs/concepts/#chat-models)
+
+- [LangChain Expression Language (LCEL)](/docs/concepts#langchain-expression-language)
+
+- [Tool calling](/docs/concepts#tools)
 
-Make sure you have the integration packages installed for any model providers you want to support. E.g. you should have `@langchain/openai` installed to init an OpenAI model.
 :::
 
 :::caution Compatibility
+**This feature is only intended to be used in Node environments. Use in non Node environments or with bundlers is not guaranteed to work and not officially supported.**
+
 `initChatModel` requires `langchain>=0.2.11`. See [this guide](/docs/how_to/installation/#installing-integration-packages) for some considerations to take when upgrading.
+
+See the [initChatModel()](https://v02.api.js.langchain.com/functions/langchain_chat_models_configurable.initChatModel.html) API reference for a full list of supported integrations.
+
+Make sure you have the integration packages installed for any model providers you want to support. E.g. you should have `@langchain/openai` installed to init an OpenAI model.
 :::
 
 ## Basic usage
 
diff --git a/langchain/src/chat_models/universal.ts b/langchain/src/chat_models/universal.ts
index 133ea4716535..3afbdc0d46f3 100644
--- a/langchain/src/chat_models/universal.ts
+++ b/langchain/src/chat_models/universal.ts
@@ -575,6 +575,9 @@ export async function initChatModel<
  * Initialize a ChatModel from the model name and provider.
  * Must have the integration package corresponding to the model provider installed.
  *
+ * @template {extends BaseLanguageModelInput = BaseLanguageModelInput} RunInput - The input type for the model.
+ * @template {extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions} CallOptions - Call options for the model.
+ *
  * @param {string | ChatModelProvider} [model] - The name of the model, e.g. "gpt-4", "claude-3-opus-20240229".
  * @param {Object} [fields] - Additional configuration options.
  * @param {string} [fields.modelProvider] - The model provider. Supported values include:
@@ -596,7 +599,7 @@ export async function initChatModel<
  * - string[]: Specified fields are configurable.
  * @param {string} [fields.configPrefix] - Prefix for configurable fields at runtime.
  * @param {Record} [fields.params] - Additional keyword args to pass to the ChatModel constructor.
- * @returns {Promise<_ConfigurableModel>} A class which extends BaseChatModel.
+ * @returns {Promise<_ConfigurableModel<RunInput, CallOptions>>} A class which extends BaseChatModel.
  * @throws {Error} If modelProvider cannot be inferred or isn't supported.
  * @throws {Error} If the model provider integration package is not installed.
  *
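
A minimal sketch of calling the `initChatModel()` helper documented by the patched guide and JSDoc above. It is an illustration under stated assumptions rather than part of the diff: it assumes `langchain>=0.2.11` running under Node (ESM), with `@langchain/openai` and `@langchain/anthropic` installed and `OPENAI_API_KEY` / `ANTHROPIC_API_KEY` set in the environment.

```typescript
// Sketch of initChatModel() usage (assumes langchain >= 0.2.11, Node/ESM,
// @langchain/openai and @langchain/anthropic installed, API keys in env).
import { initChatModel } from "langchain/chat_models/universal";

// Fixed model: pass the model name, optionally the provider, and any extra
// params that are forwarded to the underlying ChatModel constructor.
const gpt4 = await initChatModel("gpt-4", {
  modelProvider: "openai",
  temperature: 0,
});
console.log((await gpt4.invoke("what's your name")).content);

// Configurable model: leave the model unset and choose it per call via the
// `configurable` values in the call options.
const configurableModel = await initChatModel(undefined, { temperature: 0 });
const res = await configurableModel.invoke("what's your name", {
  configurable: {
    model: "claude-3-opus-20240229",
    modelProvider: "anthropic",
  },
});
console.log(res.content);
```

The configurable form is what the `configurableFields` and `configPrefix` options described in the JSDoc build on: the concrete model is resolved from runtime `configurable` values instead of being fixed at construction time.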