diff --git a/langchain-core/langchain.config.js b/langchain-core/langchain.config.js index 83ec9a213e19..09491b439c96 100644 --- a/langchain-core/langchain.config.js +++ b/langchain-core/langchain.config.js @@ -24,6 +24,7 @@ export const config = { "language_models/base": "language_models/base", "language_models/chat_models": "language_models/chat_models", "language_models/llms": "language_models/llms", + "legacy/chains": "legacy/chains", load: "load/index", "load/serializable": "load/serializable", memory: "memory", diff --git a/langchain-core/src/legacy/chains/base.ts b/langchain-core/src/legacy/chains/base.ts new file mode 100644 index 000000000000..e54e06abb06e --- /dev/null +++ b/langchain-core/src/legacy/chains/base.ts @@ -0,0 +1,258 @@ +import { BaseMemory } from "../../memory.js"; +import { ChainValues } from "../../utils/types/index.js"; +import { RUN_KEY } from "../../outputs.js"; +import { + CallbackManagerForChainRun, + CallbackManager, + Callbacks, + parseCallbackConfigArg, +} from "../../callbacks/manager.js"; +import { ensureConfig, type RunnableConfig } from "../../runnables/index.js"; +import { + BaseLangChain, + BaseLangChainParams, +} from "../../language_models/base.js"; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type LoadValues = Record; + +export interface ChainInputs extends BaseLangChainParams { + memory?: BaseMemory; + + /** + * @deprecated Use `callbacks` instead + */ + callbackManager?: CallbackManager; +} + +/** + * Base interface that all chains must implement. 
+ */ +export abstract class BaseChain< + RunInput extends ChainValues = ChainValues, + RunOutput extends ChainValues = ChainValues + > + extends BaseLangChain + implements ChainInputs +{ + declare memory?: BaseMemory; + + get lc_namespace(): string[] { + return ["langchain", "chains", this._chainType()]; + } + + constructor( + fields?: BaseMemory | ChainInputs, + /** @deprecated */ + verbose?: boolean, + /** @deprecated */ + callbacks?: Callbacks + ) { + if ( + arguments.length === 1 && + typeof fields === "object" && + !("saveContext" in fields) + ) { + // fields is not a BaseMemory + const { memory, callbackManager, ...rest } = fields; + super({ ...rest, callbacks: callbackManager ?? rest.callbacks }); + this.memory = memory; + } else { + // fields is a BaseMemory + super({ verbose, callbacks }); + this.memory = fields as BaseMemory; + } + } + + /** @ignore */ + _selectMemoryInputs(values: ChainValues): ChainValues { + const valuesForMemory = { ...values }; + if ("signal" in valuesForMemory) { + delete valuesForMemory.signal; + } + if ("timeout" in valuesForMemory) { + delete valuesForMemory.timeout; + } + return valuesForMemory; + } + + /** + * Invoke the chain with the provided input and returns the output. + * @param input Input values for the chain run. + * @param config Optional configuration for the Runnable. + * @returns Promise that resolves with the output of the chain run. 
+ */ + async invoke(input: RunInput, options?: RunnableConfig): Promise { + const config = ensureConfig(options); + const fullValues = await this._formatValues(input); + const callbackManager_ = await CallbackManager.configure( + config?.callbacks, + this.callbacks, + config?.tags, + this.tags, + config?.metadata, + this.metadata, + { verbose: this.verbose } + ); + const runManager = await callbackManager_?.handleChainStart( + this.toJSON(), + fullValues, + undefined, + undefined, + undefined, + undefined, + config?.runName + ); + let outputValues: RunOutput; + try { + outputValues = await (fullValues.signal + ? (Promise.race([ + this._call(fullValues as RunInput, runManager, config), + new Promise((_, reject) => { + fullValues.signal?.addEventListener("abort", () => { + reject(new Error("AbortError")); + }); + }), + ]) as Promise) + : this._call(fullValues as RunInput, runManager, config)); + } catch (e) { + await runManager?.handleChainError(e); + throw e; + } + if (!(this.memory == null)) { + await this.memory.saveContext( + this._selectMemoryInputs(input), + outputValues + ); + } + await runManager?.handleChainEnd(outputValues); + // add the runManager's currentRunId to the outputValues + Object.defineProperty(outputValues, RUN_KEY, { + value: runManager ? 
{ runId: runManager?.runId } : undefined, + configurable: true, + }); + return outputValues; + } + + private _validateOutputs(outputs: Record): void { + const missingKeys = this.outputKeys.filter((k) => !(k in outputs)); + if (missingKeys.length) { + throw new Error( + `Missing output keys: ${missingKeys.join( + ", " + )} from chain ${this._chainType()}` + ); + } + } + + async prepOutputs( + inputs: Record, + outputs: Record, + returnOnlyOutputs = false + ) { + this._validateOutputs(outputs); + if (this.memory) { + await this.memory.saveContext(inputs, outputs); + } + if (returnOnlyOutputs) { + return outputs; + } + return { ...inputs, ...outputs }; + } + + /** + * Run the core logic of this chain and return the output + */ + abstract _call( + values: RunInput, + runManager?: CallbackManagerForChainRun, + config?: RunnableConfig + ): Promise; + + /** + * Return the string type key uniquely identifying this class of chain. + */ + abstract _chainType(): string; + + abstract get inputKeys(): string[]; + + abstract get outputKeys(): string[]; + + /** @deprecated Use .invoke() instead. Will be removed in 0.2.0. */ + async run( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + input: any, + config?: Callbacks | RunnableConfig + ): Promise { + const inputKeys = this.inputKeys.filter( + (k) => !this.memory?.memoryKeys.includes(k) ?? true + ); + const isKeylessInput = inputKeys.length <= 1; + if (!isKeylessInput) { + throw new Error( + `Chain ${this._chainType()} expects multiple inputs, cannot use 'run' ` + ); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const values = inputKeys.length ? 
{ [inputKeys[0]]: input } : ({} as any); + const returnValues = await this.call(values, config); + const keys = Object.keys(returnValues); + + if (keys.length === 1) { + return returnValues[keys[0]]; + } + throw new Error( + "return values have multiple keys, `run` only supported when one key currently" + ); + } + + protected async _formatValues( + values: ChainValues & { signal?: AbortSignal; timeout?: number } + ) { + const fullValues = { ...values } as typeof values; + if (fullValues.timeout && !fullValues.signal) { + fullValues.signal = AbortSignal.timeout(fullValues.timeout); + delete fullValues.timeout; + } + if (!(this.memory == null)) { + const newValues = await this.memory.loadMemoryVariables( + this._selectMemoryInputs(values) + ); + for (const [key, value] of Object.entries(newValues)) { + fullValues[key] = value; + } + } + return fullValues; + } + + /** + * @deprecated Use .invoke() instead. Will be removed in 0.2.0. + * + * Run the core logic of this chain and add to output if desired. + * + * Wraps _call and handles memory. + */ + async call( + values: ChainValues & { signal?: AbortSignal; timeout?: number }, + config?: Callbacks | RunnableConfig, + /** @deprecated */ + tags?: string[] + ): Promise { + const parsedConfig = { tags, ...parseCallbackConfigArg(config) }; + return this.invoke(values as RunInput, parsedConfig); + } + + /** + * @deprecated Use .batch() instead. Will be removed in 0.2.0. 
+ * + * Call the chain on all inputs in the list + */ + async apply( + inputs: RunInput[], + config?: (Callbacks | RunnableConfig)[] + ): Promise { + return Promise.all( + inputs.map(async (i, idx) => this.call(i, config?.[idx])) + ); + } +} diff --git a/langchain-core/src/legacy/chains/index.ts b/langchain-core/src/legacy/chains/index.ts new file mode 100644 index 000000000000..3a9ddc921324 --- /dev/null +++ b/langchain-core/src/legacy/chains/index.ts @@ -0,0 +1,2 @@ +export * from "./base.js"; +export * from "./llm.js"; diff --git a/langchain-core/src/legacy/chains/llm.ts b/langchain-core/src/legacy/chains/llm.ts new file mode 100644 index 000000000000..01777b194180 --- /dev/null +++ b/langchain-core/src/legacy/chains/llm.ts @@ -0,0 +1,284 @@ +import { + BaseLanguageModel, + BaseLanguageModelInterface, + BaseLanguageModelInput, +} from "../../language_models/base.js"; +import type { ChainValues } from "../../utils/types/index.js"; +import type { Generation } from "../../outputs.js"; +import type { BaseMessage } from "../../messages/index.js"; +import type { BasePromptValueInterface } from "../../prompt_values.js"; +import { BasePromptTemplate } from "../../prompts/base.js"; +import { + BaseLLMOutputParser, + BaseOutputParser, +} from "../../output_parsers/base.js"; +import { + CallbackManager, + BaseCallbackConfig, + CallbackManagerForChainRun, + Callbacks, +} from "../../callbacks/manager.js"; +import { Runnable, type RunnableInterface } from "../../runnables/base.js"; +import { BaseChain, ChainInputs } from "./base.js"; +import { NoOpOutputParser } from "../../output_parsers/noop.js"; + +type LLMType = + | BaseLanguageModelInterface + | Runnable + | Runnable; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +type CallOptionsIfAvailable = T extends { CallOptions: infer CO } ? CO : any; +/** + * Interface for the input parameters of the LLMChain class. 
+ */ +export interface LLMChainInput< + T extends string | object = string, + Model extends LLMType = LLMType +> extends ChainInputs { + /** Prompt object to use */ + prompt: BasePromptTemplate; + /** LLM Wrapper to use */ + llm: Model; + /** Kwargs to pass to LLM */ + llmKwargs?: CallOptionsIfAvailable; + /** OutputParser to use */ + outputParser?: BaseLLMOutputParser; + /** Key to use for output, defaults to `text` */ + outputKey?: string; +} + +function isBaseLanguageModel(llmLike: unknown): llmLike is BaseLanguageModel { + return typeof (llmLike as BaseLanguageModelInterface)._llmType === "function"; +} + +function _getLanguageModel(llmLike: RunnableInterface): BaseLanguageModel { + if (isBaseLanguageModel(llmLike)) { + return llmLike; + } else if ("bound" in llmLike && Runnable.isRunnable(llmLike.bound)) { + return _getLanguageModel(llmLike.bound); + } else if ( + "runnable" in llmLike && + "fallbacks" in llmLike && + Runnable.isRunnable(llmLike.runnable) + ) { + return _getLanguageModel(llmLike.runnable); + } else if ("default" in llmLike && Runnable.isRunnable(llmLike.default)) { + return _getLanguageModel(llmLike.default); + } else { + throw new Error("Unable to extract BaseLanguageModel from llmLike object."); + } +} + +/** + * Chain to run queries against LLMs. 
+ * + * @example + * ```ts + * import { LLMChain } from "langchain/chains"; + * import { OpenAI } from "langchain/llms/openai"; + * import { PromptTemplate } from "langchain/prompts"; + * + * const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke"); + * const llm = new LLMChain({ llm: new OpenAI(), prompt }); + * ``` + */ +export class LLMChain< + T extends string | object = string, + Model extends LLMType = LLMType + > + extends BaseChain + implements LLMChainInput +{ + static lc_name() { + return "LLMChain"; + } + + lc_serializable = true; + + prompt: BasePromptTemplate; + + llm: Model; + + llmKwargs?: CallOptionsIfAvailable; + + outputKey = "text"; + + outputParser?: BaseLLMOutputParser; + + get inputKeys() { + return this.prompt.inputVariables; + } + + get outputKeys() { + return [this.outputKey]; + } + + constructor(fields: LLMChainInput) { + super(fields); + this.prompt = fields.prompt; + this.llm = fields.llm; + this.llmKwargs = fields.llmKwargs; + this.outputKey = fields.outputKey ?? this.outputKey; + this.outputParser = + fields.outputParser ?? + (new NoOpOutputParser() as unknown as BaseOutputParser); + if (this.prompt.outputParser) { + if (fields.outputParser) { + throw new Error("Cannot set both outputParser and prompt.outputParser"); + } + this.outputParser = this.prompt.outputParser as BaseOutputParser; + } + } + + private getCallKeys(): string[] { + const callKeys = "callKeys" in this.llm ? 
this.llm.callKeys : []; + return callKeys; + } + + /** @ignore */ + _selectMemoryInputs(values: ChainValues): ChainValues { + const valuesForMemory = super._selectMemoryInputs(values); + const callKeys = this.getCallKeys(); + for (const key of callKeys) { + if (key in values) { + delete valuesForMemory[key]; + } + } + return valuesForMemory; + } + + /** @ignore */ + async _getFinalOutput( + generations: Generation[], + promptValue: BasePromptValueInterface, + runManager?: CallbackManagerForChainRun + ): Promise { + let finalCompletion: unknown; + if (this.outputParser) { + finalCompletion = await this.outputParser.parseResultWithPrompt( + generations, + promptValue, + runManager?.getChild() + ); + } else { + finalCompletion = generations[0].text; + } + return finalCompletion; + } + + /** + * Run the core logic of this chain and add to output if desired. + * + * Wraps _call and handles memory. + */ + call( + values: ChainValues & CallOptionsIfAvailable, + config?: Callbacks | BaseCallbackConfig + ): Promise { + return super.call(values, config); + } + + /** @ignore */ + async _call( + values: ChainValues & CallOptionsIfAvailable, + runManager?: CallbackManagerForChainRun + ): Promise { + const valuesForPrompt = { ...values }; + const valuesForLLM = { + ...this.llmKwargs, + } as CallOptionsIfAvailable; + const callKeys = this.getCallKeys(); + for (const key of callKeys) { + if (key in values) { + if (valuesForLLM) { + valuesForLLM[key as keyof CallOptionsIfAvailable] = + values[key]; + delete valuesForPrompt[key]; + } + } + } + const promptValue = await this.prompt.formatPromptValue(valuesForPrompt); + if ("generatePrompt" in this.llm) { + const { generations } = await this.llm.generatePrompt( + [promptValue], + valuesForLLM, + runManager?.getChild() + ); + return { + [this.outputKey]: await this._getFinalOutput( + generations[0], + promptValue, + runManager + ), + }; + } + + const modelWithParser = this.outputParser + ? 
this.llm.pipe(this.outputParser) + : this.llm; + const response = await modelWithParser.invoke( + promptValue, + runManager?.getChild() + ); + return { + [this.outputKey]: response, + }; + } + + /** + * Format prompt with values and pass to LLM + * + * @param values - keys to pass to prompt template + * @param callbackManager - CallbackManager to use + * @returns Completion from LLM. + * + * @example + * ```ts + * llm.predict({ adjective: "funny" }) + * ``` + */ + async predict( + values: ChainValues & CallOptionsIfAvailable, + callbackManager?: CallbackManager + ): Promise { + const output = await this.call(values, callbackManager); + return output[this.outputKey]; + } + + _chainType() { + return "llm" as const; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + static async deserialize(data: any) { + const { llm, prompt } = data; + if (!llm) { + throw new Error("LLMChain must have llm"); + } + if (!prompt) { + throw new Error("LLMChain must have prompt"); + } + + return new LLMChain({ + llm: await BaseLanguageModel.deserialize(llm), + prompt: await BasePromptTemplate.deserialize(prompt), + }); + } + + /** @deprecated */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + serialize(): any { + const serialize = + "serialize" in this.llm ? 
this.llm.serialize() : undefined; + return { + _type: `${this._chainType()}_chain`, + llm: serialize, + prompt: this.prompt.serialize(), + }; + } + + _getNumTokens(text: string): Promise { + return _getLanguageModel(this.llm).getNumTokens(text); + } +} diff --git a/langchain-core/src/output_parsers/index.ts b/langchain-core/src/output_parsers/index.ts index 7362403fc698..01792335a891 100644 --- a/langchain-core/src/output_parsers/index.ts +++ b/langchain-core/src/output_parsers/index.ts @@ -6,3 +6,4 @@ export * from "./structured.js"; export * from "./transform.js"; export * from "./json.js"; export * from "./xml.js"; +export * from "./noop.js"; diff --git a/langchain-core/src/output_parsers/noop.ts b/langchain-core/src/output_parsers/noop.ts new file mode 100644 index 000000000000..7c1e687ebfe7 --- /dev/null +++ b/langchain-core/src/output_parsers/noop.ts @@ -0,0 +1,39 @@ +import { BaseOutputParser } from "./base.js"; + +/** + * @deprecated Use StringOutputParser instead + * + * The NoOpOutputParser class is a type of output parser that does not + * perform any operations on the output. It extends the BaseOutputParser + * class and is part of the LangChain's output parsers module. This class + * is useful in scenarios where the raw output of the Large Language + * Models (LLMs) is required. + */ +export class NoOpOutputParser extends BaseOutputParser { + static lc_name() { + return "NoOpOutputParser"; + } + + lc_namespace = ["langchain", "output_parsers", "default"]; + + lc_serializable = true; + + /** + * This method takes a string as input and returns the same string as + * output. It does not perform any operations on the input string. + * @param text The input string to be parsed. + * @returns The same input string without any operations performed on it. + */ + parse(text: string): Promise { + return Promise.resolve(text); + } + + /** + * This method returns an empty string. It does not provide any formatting + * instructions.
+ * @returns An empty string, indicating no formatting instructions. + */ + getFormatInstructions(): string { + return ""; + } +} diff --git a/langchain/src/chains/base.ts b/langchain/src/chains/base.ts index 1321f080a6ce..7cec528d2911 100644 --- a/langchain/src/chains/base.ts +++ b/langchain/src/chains/base.ts @@ -1,180 +1,13 @@ -import { BaseMemory } from "@langchain/core/memory"; -import { ChainValues } from "@langchain/core/utils/types"; -import { RUN_KEY } from "@langchain/core/outputs"; import { - CallbackManagerForChainRun, - CallbackManager, - Callbacks, - parseCallbackConfigArg, -} from "@langchain/core/callbacks/manager"; -import { ensureConfig, type RunnableConfig } from "@langchain/core/runnables"; -import { - BaseLangChain, - BaseLangChainParams, -} from "@langchain/core/language_models/base"; + type LoadValues, + type ChainInputs, + BaseChain as CoreBaseChain, +} from "@langchain/core/legacy/chains"; import { SerializedBaseChain } from "./serde.js"; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type LoadValues = Record; - -export interface ChainInputs extends BaseLangChainParams { - memory?: BaseMemory; - - /** - * @deprecated Use `callbacks` instead - */ - callbackManager?: CallbackManager; -} - -/** - * Base interface that all chains must implement. 
- */ -export abstract class BaseChain< - RunInput extends ChainValues = ChainValues, - RunOutput extends ChainValues = ChainValues - > - extends BaseLangChain - implements ChainInputs -{ - declare memory?: BaseMemory; - - get lc_namespace(): string[] { - return ["langchain", "chains", this._chainType()]; - } - - constructor( - fields?: BaseMemory | ChainInputs, - /** @deprecated */ - verbose?: boolean, - /** @deprecated */ - callbacks?: Callbacks - ) { - if ( - arguments.length === 1 && - typeof fields === "object" && - !("saveContext" in fields) - ) { - // fields is not a BaseMemory - const { memory, callbackManager, ...rest } = fields; - super({ ...rest, callbacks: callbackManager ?? rest.callbacks }); - this.memory = memory; - } else { - // fields is a BaseMemory - super({ verbose, callbacks }); - this.memory = fields as BaseMemory; - } - } - - /** @ignore */ - _selectMemoryInputs(values: ChainValues): ChainValues { - const valuesForMemory = { ...values }; - if ("signal" in valuesForMemory) { - delete valuesForMemory.signal; - } - if ("timeout" in valuesForMemory) { - delete valuesForMemory.timeout; - } - return valuesForMemory; - } - - /** - * Invoke the chain with the provided input and returns the output. - * @param input Input values for the chain run. - * @param config Optional configuration for the Runnable. - * @returns Promise that resolves with the output of the chain run. 
- */ - async invoke(input: RunInput, options?: RunnableConfig): Promise { - const config = ensureConfig(options); - const fullValues = await this._formatValues(input); - const callbackManager_ = await CallbackManager.configure( - config?.callbacks, - this.callbacks, - config?.tags, - this.tags, - config?.metadata, - this.metadata, - { verbose: this.verbose } - ); - const runManager = await callbackManager_?.handleChainStart( - this.toJSON(), - fullValues, - undefined, - undefined, - undefined, - undefined, - config?.runName - ); - let outputValues: RunOutput; - try { - outputValues = await (fullValues.signal - ? (Promise.race([ - this._call(fullValues as RunInput, runManager, config), - new Promise((_, reject) => { - fullValues.signal?.addEventListener("abort", () => { - reject(new Error("AbortError")); - }); - }), - ]) as Promise) - : this._call(fullValues as RunInput, runManager, config)); - } catch (e) { - await runManager?.handleChainError(e); - throw e; - } - if (!(this.memory == null)) { - await this.memory.saveContext( - this._selectMemoryInputs(input), - outputValues - ); - } - await runManager?.handleChainEnd(outputValues); - // add the runManager's currentRunId to the outputValues - Object.defineProperty(outputValues, RUN_KEY, { - value: runManager ? 
{ runId: runManager?.runId } : undefined, - configurable: true, - }); - return outputValues; - } - - private _validateOutputs(outputs: Record): void { - const missingKeys = this.outputKeys.filter((k) => !(k in outputs)); - if (missingKeys.length) { - throw new Error( - `Missing output keys: ${missingKeys.join( - ", " - )} from chain ${this._chainType()}` - ); - } - } - - async prepOutputs( - inputs: Record, - outputs: Record, - returnOnlyOutputs = false - ) { - this._validateOutputs(outputs); - if (this.memory) { - await this.memory.saveContext(inputs, outputs); - } - if (returnOnlyOutputs) { - return outputs; - } - return { ...inputs, ...outputs }; - } - - /** - * Run the core logic of this chain and return the output - */ - abstract _call( - values: RunInput, - runManager?: CallbackManagerForChainRun, - config?: RunnableConfig - ): Promise; - - /** - * Return the string type key uniquely identifying this class of chain. - */ - abstract _chainType(): string; +export type { LoadValues, ChainInputs }; +export abstract class BaseChain extends CoreBaseChain { /** * Return a json-like object representing this chain. */ @@ -182,88 +15,6 @@ export abstract class BaseChain< throw new Error("Method not implemented."); } - abstract get inputKeys(): string[]; - - abstract get outputKeys(): string[]; - - /** @deprecated Use .invoke() instead. Will be removed in 0.2.0. */ - async run( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - input: any, - config?: Callbacks | RunnableConfig - ): Promise { - const inputKeys = this.inputKeys.filter( - (k) => !this.memory?.memoryKeys.includes(k) ?? true - ); - const isKeylessInput = inputKeys.length <= 1; - if (!isKeylessInput) { - throw new Error( - `Chain ${this._chainType()} expects multiple inputs, cannot use 'run' ` - ); - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const values = inputKeys.length ? 
{ [inputKeys[0]]: input } : ({} as any); - const returnValues = await this.call(values, config); - const keys = Object.keys(returnValues); - - if (keys.length === 1) { - return returnValues[keys[0]]; - } - throw new Error( - "return values have multiple keys, `run` only supported when one key currently" - ); - } - - protected async _formatValues( - values: ChainValues & { signal?: AbortSignal; timeout?: number } - ) { - const fullValues = { ...values } as typeof values; - if (fullValues.timeout && !fullValues.signal) { - fullValues.signal = AbortSignal.timeout(fullValues.timeout); - delete fullValues.timeout; - } - if (!(this.memory == null)) { - const newValues = await this.memory.loadMemoryVariables( - this._selectMemoryInputs(values) - ); - for (const [key, value] of Object.entries(newValues)) { - fullValues[key] = value; - } - } - return fullValues; - } - - /** - * @deprecated Use .invoke() instead. Will be removed in 0.2.0. - * - * Run the core logic of this chain and add to output if desired. - * - * Wraps _call and handles memory. - */ - async call( - values: ChainValues & { signal?: AbortSignal; timeout?: number }, - config?: Callbacks | RunnableConfig, - /** @deprecated */ - tags?: string[] - ): Promise { - const parsedConfig = { tags, ...parseCallbackConfigArg(config) }; - return this.invoke(values as RunInput, parsedConfig); - } - - /** - * @deprecated Use .batch() instead. Will be removed in 0.2.0. - * - * Call the chain on all inputs in the list - */ - async apply( - inputs: RunInput[], - config?: (Callbacks | RunnableConfig)[] - ): Promise { - return Promise.all( - inputs.map(async (i, idx) => this.call(i, config?.[idx])) - ); - } - /** * Load a chain from a json-like object describing it. 
*/ diff --git a/langchain/src/chains/llm_chain.ts b/langchain/src/chains/llm_chain.ts index a4556e33e27e..79c22519bdfa 100644 --- a/langchain/src/chains/llm_chain.ts +++ b/langchain/src/chains/llm_chain.ts @@ -1,282 +1,3 @@ -import { - BaseLanguageModel, - BaseLanguageModelInterface, - BaseLanguageModelInput, -} from "@langchain/core/language_models/base"; -import type { ChainValues } from "@langchain/core/utils/types"; -import type { Generation } from "@langchain/core/outputs"; -import type { BaseMessage } from "@langchain/core/messages"; -import type { BasePromptValueInterface } from "@langchain/core/prompt_values"; -import { BasePromptTemplate } from "@langchain/core/prompts"; -import { - BaseLLMOutputParser, - BaseOutputParser, -} from "@langchain/core/output_parsers"; -import { - CallbackManager, - BaseCallbackConfig, - CallbackManagerForChainRun, - Callbacks, -} from "@langchain/core/callbacks/manager"; -import { Runnable, type RunnableInterface } from "@langchain/core/runnables"; -import { BaseChain, ChainInputs } from "./base.js"; -import { SerializedLLMChain } from "./serde.js"; -import { NoOpOutputParser } from "../output_parsers/noop.js"; +import { type LLMChainInput, LLMChain } from "@langchain/core/legacy/chains"; -type LLMType = - | BaseLanguageModelInterface - | Runnable - | Runnable; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -type CallOptionsIfAvailable = T extends { CallOptions: infer CO } ? CO : any; -/** - * Interface for the input parameters of the LLMChain class. 
- */ -export interface LLMChainInput< - T extends string | object = string, - Model extends LLMType = LLMType -> extends ChainInputs { - /** Prompt object to use */ - prompt: BasePromptTemplate; - /** LLM Wrapper to use */ - llm: Model; - /** Kwargs to pass to LLM */ - llmKwargs?: CallOptionsIfAvailable; - /** OutputParser to use */ - outputParser?: BaseLLMOutputParser; - /** Key to use for output, defaults to `text` */ - outputKey?: string; -} - -function isBaseLanguageModel(llmLike: unknown): llmLike is BaseLanguageModel { - return typeof (llmLike as BaseLanguageModelInterface)._llmType === "function"; -} - -function _getLanguageModel(llmLike: RunnableInterface): BaseLanguageModel { - if (isBaseLanguageModel(llmLike)) { - return llmLike; - } else if ("bound" in llmLike && Runnable.isRunnable(llmLike.bound)) { - return _getLanguageModel(llmLike.bound); - } else if ( - "runnable" in llmLike && - "fallbacks" in llmLike && - Runnable.isRunnable(llmLike.runnable) - ) { - return _getLanguageModel(llmLike.runnable); - } else if ("default" in llmLike && Runnable.isRunnable(llmLike.default)) { - return _getLanguageModel(llmLike.default); - } else { - throw new Error("Unable to extract BaseLanguageModel from llmLike object."); - } -} - -/** - * Chain to run queries against LLMs. 
- * - * @example - * ```ts - * import { LLMChain } from "langchain/chains"; - * import { OpenAI } from "langchain/llms/openai"; - * import { PromptTemplate } from "langchain/prompts"; - * - * const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke"); - * const llm = new LLMChain({ llm: new OpenAI(), prompt }); - * ``` - */ -export class LLMChain< - T extends string | object = string, - Model extends LLMType = LLMType - > - extends BaseChain - implements LLMChainInput -{ - static lc_name() { - return "LLMChain"; - } - - lc_serializable = true; - - prompt: BasePromptTemplate; - - llm: Model; - - llmKwargs?: CallOptionsIfAvailable; - - outputKey = "text"; - - outputParser?: BaseLLMOutputParser; - - get inputKeys() { - return this.prompt.inputVariables; - } - - get outputKeys() { - return [this.outputKey]; - } - - constructor(fields: LLMChainInput) { - super(fields); - this.prompt = fields.prompt; - this.llm = fields.llm; - this.llmKwargs = fields.llmKwargs; - this.outputKey = fields.outputKey ?? this.outputKey; - this.outputParser = - fields.outputParser ?? (new NoOpOutputParser() as BaseOutputParser); - if (this.prompt.outputParser) { - if (fields.outputParser) { - throw new Error("Cannot set both outputParser and prompt.outputParser"); - } - this.outputParser = this.prompt.outputParser as BaseOutputParser; - } - } - - private getCallKeys(): string[] { - const callKeys = "callKeys" in this.llm ? 
this.llm.callKeys : []; - return callKeys; - } - - /** @ignore */ - _selectMemoryInputs(values: ChainValues): ChainValues { - const valuesForMemory = super._selectMemoryInputs(values); - const callKeys = this.getCallKeys(); - for (const key of callKeys) { - if (key in values) { - delete valuesForMemory[key]; - } - } - return valuesForMemory; - } - - /** @ignore */ - async _getFinalOutput( - generations: Generation[], - promptValue: BasePromptValueInterface, - runManager?: CallbackManagerForChainRun - ): Promise { - let finalCompletion: unknown; - if (this.outputParser) { - finalCompletion = await this.outputParser.parseResultWithPrompt( - generations, - promptValue, - runManager?.getChild() - ); - } else { - finalCompletion = generations[0].text; - } - return finalCompletion; - } - - /** - * Run the core logic of this chain and add to output if desired. - * - * Wraps _call and handles memory. - */ - call( - values: ChainValues & CallOptionsIfAvailable, - config?: Callbacks | BaseCallbackConfig - ): Promise { - return super.call(values, config); - } - - /** @ignore */ - async _call( - values: ChainValues & CallOptionsIfAvailable, - runManager?: CallbackManagerForChainRun - ): Promise { - const valuesForPrompt = { ...values }; - const valuesForLLM = { - ...this.llmKwargs, - } as CallOptionsIfAvailable; - const callKeys = this.getCallKeys(); - for (const key of callKeys) { - if (key in values) { - if (valuesForLLM) { - valuesForLLM[key as keyof CallOptionsIfAvailable] = - values[key]; - delete valuesForPrompt[key]; - } - } - } - const promptValue = await this.prompt.formatPromptValue(valuesForPrompt); - if ("generatePrompt" in this.llm) { - const { generations } = await this.llm.generatePrompt( - [promptValue], - valuesForLLM, - runManager?.getChild() - ); - return { - [this.outputKey]: await this._getFinalOutput( - generations[0], - promptValue, - runManager - ), - }; - } - - const modelWithParser = this.outputParser - ? 
this.llm.pipe(this.outputParser) - : this.llm; - const response = await modelWithParser.invoke( - promptValue, - runManager?.getChild() - ); - return { - [this.outputKey]: response, - }; - } - - /** - * Format prompt with values and pass to LLM - * - * @param values - keys to pass to prompt template - * @param callbackManager - CallbackManager to use - * @returns Completion from LLM. - * - * @example - * ```ts - * llm.predict({ adjective: "funny" }) - * ``` - */ - async predict( - values: ChainValues & CallOptionsIfAvailable, - callbackManager?: CallbackManager - ): Promise { - const output = await this.call(values, callbackManager); - return output[this.outputKey]; - } - - _chainType() { - return "llm" as const; - } - - static async deserialize(data: SerializedLLMChain) { - const { llm, prompt } = data; - if (!llm) { - throw new Error("LLMChain must have llm"); - } - if (!prompt) { - throw new Error("LLMChain must have prompt"); - } - - return new LLMChain({ - llm: await BaseLanguageModel.deserialize(llm), - prompt: await BasePromptTemplate.deserialize(prompt), - }); - } - - /** @deprecated */ - serialize(): SerializedLLMChain { - const serialize = - "serialize" in this.llm ? this.llm.serialize() : undefined; - return { - _type: `${this._chainType()}_chain`, - llm: serialize, - prompt: this.prompt.serialize(), - }; - } - - _getNumTokens(text: string): Promise { - return _getLanguageModel(this.llm).getNumTokens(text); - } -} +export { type LLMChainInput, LLMChain }; diff --git a/langchain/src/output_parsers/noop.ts b/langchain/src/output_parsers/noop.ts index 8a49d1484786..da4babef010e 100644 --- a/langchain/src/output_parsers/noop.ts +++ b/langchain/src/output_parsers/noop.ts @@ -1,37 +1,3 @@ -import { BaseOutputParser } from "@langchain/core/output_parsers"; +import { NoOpOutputParser } from "@langchain/core/output_parsers"; -/** - * The NoOpOutputParser class is a type of output parser that does not - * perform any operations on the output. 
It extends the BaseOutputParser - * class and is part of the LangChain's output parsers module. This class - * is useful in scenarios where the raw output of the Large Language - * Models (LLMs) is required. - */ -export class NoOpOutputParser extends BaseOutputParser { - static lc_name() { - return "NoOpOutputParser"; - } - - lc_namespace = ["langchain", "output_parsers", "default"]; - - lc_serializable = true; - - /** - * This method takes a string as input and returns the same string as - * output. It does not perform any operations on the input string. - * @param text The input string to be parsed. - * @returns The same input string without any operations performed on it. - */ - parse(text: string): Promise { - return Promise.resolve(text); - } - - /** - * This method returns an empty string. It does not provide any formatting - * instructions. - * @returns An empty string, indicating no formatting instructions. - */ - getFormatInstructions(): string { - return ""; - } -} +export { NoOpOutputParser };