From 649232bab6451f89735bf255add9ff674b15cea1 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sat, 6 Jul 2024 23:18:23 -0700 Subject: [PATCH 01/31] Use run tree as stored value in async local storage --- langchain-core/src/callbacks/manager.ts | 13 +-- langchain-core/src/runnables/base.ts | 88 +++++++++++-------- langchain-core/src/runnables/config.ts | 2 +- langchain-core/src/runnables/iter.ts | 21 +++-- langchain-core/src/singletons/index.ts | 62 ++++++++++++- .../tests/async_local_storage.test.ts | 5 +- langchain-core/src/tracers/base.ts | 1 + langchain-core/src/utils/callbacks.ts | 14 +++ langchain-core/src/utils/stream.ts | 49 ++++++++--- 9 files changed, 188 insertions(+), 67 deletions(-) create mode 100644 langchain-core/src/utils/callbacks.ts diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 1ec9f6589f83..8c21651913c3 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -20,9 +20,10 @@ import { import { consumeCallback } from "./promises.js"; import { Serialized } from "../load/serializable.js"; import type { DocumentInterface } from "../documents/document.js"; +import { isTracingEnabled } from "../utils/callbacks.js"; if ( - /* #__PURE__ */ getEnvironmentVariable("LANGCHAIN_TRACING_V2") === "true" && + /* #__PURE__ */ isTracingEnabled() && /* #__PURE__ */ getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") !== "true" ) { @@ -111,7 +112,7 @@ export abstract class BaseCallbackManager { /** * Base class for run manager in LangChain. */ -class BaseRunManager { +export class BaseRunManager { constructor( public readonly runId: string, public readonly handlers: BaseCallbackHandler[], @@ -123,6 +124,10 @@ class BaseRunManager { protected readonly _parentRunId?: string ) {} + get parentRunId() { + return this._parentRunId; + } + async handleText(text: string): Promise { await Promise.all( this.handlers.map((handler) => @@ -984,9 +989,7 @@ export class CallbackManager const verboseEnabled = getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" || options?.verbose; - const tracingV2Enabled = - getEnvironmentVariable("LANGCHAIN_TRACING_V2") === "true" || - getEnvironmentVariable("LANGSMITH_TRACING") === "true"; + const tracingV2Enabled = isTracingEnabled(); const tracingEnabled = tracingV2Enabled || diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index a2eea53ce474..488c5e45cb0b 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -7,10 +7,7 @@ import { isTraceableFunction, } from "langsmith/singletons/traceable"; import type { RunnableInterface, RunnableBatchOptions } from "./types.js"; -import { - CallbackManager, - CallbackManagerForChainRun, -} from "../callbacks/manager.js"; +import { CallbackManagerForChainRun } from "../callbacks/manager.js"; import { LogStreamCallbackHandler, LogStreamCallbackHandlerInput, @@ -310,9 +307,18 @@ export abstract class Runnable< // Buffer the first streamed chunk to allow for initial errors // to surface immediately. 
const config = ensureConfig(options); + const callbackManager = await getCallbackManagerForConfig(config); + console.log( + "ABOUT TO STREAM", + config, + callbackManager?.getParentRunId(), + callbackManager?.handlers + ); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this._streamIterator(input, config), config, + parentRunId: callbackManager?.getParentRunId(), + handlers: callbackManager?.handlers, }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); @@ -504,6 +510,8 @@ export abstract class Runnable< undefined, config.runName ?? this.getName() ), + callbackManager_?.getParentRunId(), + callbackManager_?.handlers, config ); delete config.runId; @@ -2227,8 +2235,13 @@ export class RunnableLambda extends Runnable< callbacks: runManager?.getChild(), recursionLimit: (config?.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1, }); - void AsyncLocalStorageProviderSingleton.getInstance().run( + const tracingData = { + parentRunId: runManager?.parentRunId, + handlers: runManager?.handlers, + }; + void AsyncLocalStorageProviderSingleton.runWithConfig( childConfig, + tracingData, async () => { try { let output = await this.func(input, { @@ -2248,6 +2261,7 @@ export class RunnableLambda extends Runnable< let finalOutput: RunOutput | undefined; for await (const chunk of consumeAsyncIterableInContext( childConfig, + tracingData, output )) { if (finalOutput === undefined) { @@ -2267,6 +2281,7 @@ export class RunnableLambda extends Runnable< let finalOutput: RunOutput | undefined; for (const chunk of consumeIteratorInContext( childConfig, + tracingData, output )) { if (finalOutput === undefined) { @@ -2318,15 +2333,25 @@ export class RunnableLambda extends Runnable< } } } + // pass everywhere? + const childConfig = patchConfig(config, { + callbacks: runManager?.getChild(), + recursionLimit: (config?.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1, + }); + const tracingData = { + parentRunId: runManager?.parentRunId, + handlers: runManager?.handlers, + }; const output = await new Promise( (resolve, reject) => { - void AsyncLocalStorageProviderSingleton.getInstance().run( - config, + void AsyncLocalStorageProviderSingleton.runWithConfig( + childConfig, + tracingData, async () => { try { const res = await this.func(finalChunk as RunInput, { - ...config, - config, + ...childConfig, + config: childConfig, }); resolve(res); } catch (e) { @@ -2340,23 +2365,24 @@ export class RunnableLambda extends Runnable< if (config?.recursionLimit === 0) { throw new Error("Recursion limit reached."); } - const stream = await output.stream( - finalChunk as RunInput, - patchConfig(config, { - callbacks: runManager?.getChild(), - recursionLimit: - (config?.recursionLimit ?? 
DEFAULT_RECURSION_LIMIT) - 1, - }) - ); + const stream = await output.stream(finalChunk as RunInput, childConfig); for await (const chunk of stream) { yield chunk; } } else if (isAsyncIterable(output)) { - for await (const chunk of consumeAsyncIterableInContext(config, output)) { + for await (const chunk of consumeAsyncIterableInContext( + childConfig, + tracingData, + output + )) { yield chunk as RunOutput; } } else if (isIterableIterator(output)) { - for (const chunk of consumeIteratorInContext(config, output)) { + for (const chunk of consumeIteratorInContext( + childConfig, + tracingData, + output + )) { yield chunk as RunOutput; } } else { @@ -2383,9 +2409,12 @@ export class RunnableLambda extends Runnable< yield input; } const config = ensureConfig(options); + const callbackManager = await getCallbackManagerForConfig(config); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this.transform(generator(), config), config, + parentRunId: callbackManager?.getParentRunId(), + handlers: callbackManager?.handlers, }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); @@ -2431,15 +2460,10 @@ export class RunnableWithFallbacks extends Runnable< async invoke( input: RunInput, - options?: Partial + config?: Partial ): Promise { - const callbackManager_ = await CallbackManager.configure( - options?.callbacks, - undefined, - options?.tags, - undefined, - options?.metadata - ); + const options = ensureConfig(config); + const callbackManager_ = await getCallbackManagerForConfig(options); const { runId, ...otherOptions } = options ?? {}; const runManager = await callbackManager_?.handleChainStart( this.toJSON(), @@ -2500,15 +2524,7 @@ export class RunnableWithFallbacks extends Runnable< } const configList = this._getOptionsList(options ?? {}, inputs.length); const callbackManagers = await Promise.all( - configList.map((config) => - CallbackManager.configure( - config?.callbacks, - undefined, - config?.tags, - undefined, - config?.metadata - ) - ) + configList.map((config) => getCallbackManagerForConfig(config)) ); const runManagers = await Promise.all( callbackManagers.map(async (callbackManager, i) => { diff --git a/langchain-core/src/runnables/config.ts b/langchain-core/src/runnables/config.ts index 23e4b11bef88..582363016cf1 100644 --- a/langchain-core/src/runnables/config.ts +++ b/langchain-core/src/runnables/config.ts @@ -127,7 +127,7 @@ export function ensureConfig( config?: CallOptions ): CallOptions { const loadedConfig = - config ?? AsyncLocalStorageProviderSingleton.getInstance().getStore(); + config ?? 
AsyncLocalStorageProviderSingleton.getRunnableConfig(); let empty: RunnableConfig = { tags: [], metadata: {}, diff --git a/langchain-core/src/runnables/iter.ts b/langchain-core/src/runnables/iter.ts index 33df40c5991f..e40691b08e11 100644 --- a/langchain-core/src/runnables/iter.ts +++ b/langchain-core/src/runnables/iter.ts @@ -1,3 +1,4 @@ +import { BaseCallbackHandler } from "../callbacks/base.js"; import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; import { RunnableConfig } from "./config.js"; @@ -32,11 +33,15 @@ export function isAsyncIterable( export function* consumeIteratorInContext( context: Partial | undefined, + options: { parentRunId?: string; handlers?: BaseCallbackHandler[] }, iter: IterableIterator ): IterableIterator { - const storage = AsyncLocalStorageProviderSingleton.getInstance(); while (true) { - const { value, done } = storage.run(context, iter.next.bind(iter)); + const { value, done } = AsyncLocalStorageProviderSingleton.runWithConfig( + context, + options, + iter.next.bind(iter) + ); if (done) { break; } else { @@ -47,15 +52,17 @@ export function* consumeIteratorInContext( export async function* consumeAsyncIterableInContext( context: Partial | undefined, + options: { parentRunId?: string; handlers?: BaseCallbackHandler[] }, iter: AsyncIterable ): AsyncIterableIterator { - const storage = AsyncLocalStorageProviderSingleton.getInstance(); const iterator = iter[Symbol.asyncIterator](); while (true) { - const { value, done } = await storage.run( - context, - iterator.next.bind(iter) - ); + const { value, done } = + await AsyncLocalStorageProviderSingleton.runWithConfig( + context, + options, + iterator.next.bind(iter) + ); if (done) { break; } else { diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index c01af26a62e8..32beb6d9b444 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -1,4 +1,6 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ +import { type Client, RunTree } from "langsmith"; +import { isTracingEnabled } from "../utils/callbacks.js"; export interface AsyncLocalStorageInterface { getStore: () => any | undefined; @@ -21,14 +23,68 @@ const mockAsyncLocalStorage = new MockAsyncLocalStorage(); class AsyncLocalStorageProvider { getInstance(): AsyncLocalStorageInterface { return ( - (globalThis as any).__lc_tracing_async_local_storage ?? + (globalThis as any).__lc_tracing_async_local_storage_v2 ?? 
mockAsyncLocalStorage ); } + getRunnableConfig() { + const storage = this.getInstance(); + return storage.getStore()?.extra?._lc_runnable_config; + } + + runWithConfig( + config: any, + options: { parentRunId?: string; handlers?: any[] }, + callback: () => T + ): T { + const { parentRunId, handlers } = options; + const storage = this.getInstance(); + const currentRunTree = storage.getStore(); + let newRunTree; + if (currentRunTree !== undefined && currentRunTree.id === config?.run_id) { + newRunTree = currentRunTree; + newRunTree.extra = { + ...newRunTree.extra, + _lc_runnable_config: config, + }; + } else { + let parentRun: RunTree | undefined; + let projectName: string | undefined; + let client: Client | undefined; + + let tracingEnabled = isTracingEnabled(); + + console.log("PARENT RUN ID", parentRunId); + const langChainTracer = handlers?.find( + (handler: any) => handler?.name == "langchain_tracer" + ); + + parentRun = langChainTracer?.getRun?.(parentRunId); + projectName = langChainTracer?.projectName; + client = langChainTracer?.client; + tracingEnabled = tracingEnabled || !!langChainTracer; + newRunTree = new RunTree({ + client, + tracingEnabled, + id: config?.run_id, + parent_run_id: parentRun?.id, + project_name: projectName, + name: config?.runName ?? "", + extra: { + metadata: { ...config?.metadata }, + _lc_runnable_config: config, + }, + }); + } + // return storage.run({ extra: { _lc_runnable_config: config }}, callback); + // console.log("NEW RUN TREE", newRunTree); + return storage.run(newRunTree, callback); + } + initializeGlobalInstance(instance: AsyncLocalStorageInterface) { - if ((globalThis as any).__lc_tracing_async_local_storage === undefined) { - (globalThis as any).__lc_tracing_async_local_storage = instance; + if ((globalThis as any).__lc_tracing_async_local_storage_v2 === undefined) { + (globalThis as any).__lc_tracing_async_local_storage_v2 = instance; } } } diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index 850443f4d520..e588f05451b8 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -132,7 +132,7 @@ test("Config should be automatically populated after setting global async local ).toEqual(1); }); -test("Runnable streamEvents method with streaming nested in a RunnableLambda", async () => { +test.only("Runnable streamEvents method with streaming nested in a RunnableLambda", async () => { AsyncLocalStorageProviderSingleton.initializeGlobalInstance( new AsyncLocalStorage() ); @@ -147,11 +147,12 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a const myNestedLambda = RunnableLambda.from(myFunc); + // console.log(await myNestedLambda.invoke("hello")); + const events = []; for await (const event of myNestedLambda.streamEvents("hello", { version: "v1", })) { - console.log(event); events.push(event); } const chatModelStreamEvent = events.find((event) => { diff --git a/langchain-core/src/tracers/base.ts b/langchain-core/src/tracers/base.ts index dcd3291a4a57..a787d00dd51f 100644 --- a/langchain-core/src/tracers/base.ts +++ b/langchain-core/src/tracers/base.ts @@ -91,6 +91,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { } protected async _startTrace(run: Run) { + console.log("STARTING RUN", run, this.runMap); const currentDottedOrder = convertToDottedOrderFormat( run.start_time, run.id, diff --git 
a/langchain-core/src/utils/callbacks.ts b/langchain-core/src/utils/callbacks.ts new file mode 100644 index 000000000000..920c75b633ae --- /dev/null +++ b/langchain-core/src/utils/callbacks.ts @@ -0,0 +1,14 @@ +import { getEnvironmentVariable } from "./env.js"; + +export const isTracingEnabled = (tracingEnabled?: boolean): boolean => { + if (tracingEnabled !== undefined) { + return tracingEnabled; + } + const envVars = [ + "LANGSMITH_TRACING_V2", + "LANGCHAIN_TRACING_V2", + "LANGSMITH_TRACING", + "LANGCHAIN_TRACING", + ]; + return !!envVars.find((envVar) => getEnvironmentVariable(envVar) === "true"); +}; diff --git a/langchain-core/src/utils/stream.ts b/langchain-core/src/utils/stream.ts index dc64d5468d70..3e2a59b1b710 100644 --- a/langchain-core/src/utils/stream.ts +++ b/langchain-core/src/utils/stream.ts @@ -1,5 +1,6 @@ // Make this a type to override ReadableStream's async iterator type in case // the popular web-streams-polyfill is imported - the supplied types +import { BaseCallbackHandler } from "../callbacks/base.js"; import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; // in this case don't quite match. @@ -186,6 +187,10 @@ export class AsyncGeneratorWithSetup< public config?: unknown; + private parentRunId?: string; + + private handlers?: BaseCallbackHandler[]; + private firstResult: Promise>; private firstResultUsed = false; @@ -194,23 +199,31 @@ export class AsyncGeneratorWithSetup< generator: AsyncGenerator; startSetup?: () => Promise; config?: unknown; + parentRunId?: string; + handlers?: BaseCallbackHandler[]; }) { this.generator = params.generator; this.config = params.config; + this.parentRunId = params.parentRunId; + this.handlers = params.handlers; + console.log("IN SETUP", this.parentRunId, this.handlers); // setup is a promise that resolves only after the first iterator value // is available. this is useful when setup of several piped generators // needs to happen in logical order, ie. in the order in which input to // to each generator is available. 
this.setup = new Promise((resolve, reject) => { - const storage = AsyncLocalStorageProviderSingleton.getInstance(); - void storage.run(params.config, async () => { - this.firstResult = params.generator.next(); - if (params.startSetup) { - this.firstResult.then(params.startSetup).then(resolve, reject); - } else { - this.firstResult.then((_result) => resolve(undefined as S), reject); + void AsyncLocalStorageProviderSingleton.runWithConfig( + params.config, + { parentRunId: this.parentRunId, handlers: this.handlers }, + async () => { + this.firstResult = params.generator.next(); + if (params.startSetup) { + this.firstResult.then(params.startSetup).then(resolve, reject); + } else { + this.firstResult.then((_result) => resolve(undefined as S), reject); + } } - }); + ); }); } @@ -220,10 +233,13 @@ export class AsyncGeneratorWithSetup< return this.firstResult; } - const storage = AsyncLocalStorageProviderSingleton.getInstance(); - return storage.run(this.config, async () => { - return this.generator.next(...args); - }); + return AsyncLocalStorageProviderSingleton.runWithConfig( + this.config, + { parentRunId: this.parentRunId, handlers: this.handlers }, + async () => { + return this.generator.next(...args); + } + ); } async return( @@ -258,9 +274,16 @@ export async function pipeGeneratorWithSetup< ) => AsyncGenerator, generator: AsyncGenerator, startSetup: () => Promise, + parentRunId?: string, + handlers?: BaseCallbackHandler[], ...args: A ) { - const gen = new AsyncGeneratorWithSetup({ generator, startSetup }); + const gen = new AsyncGeneratorWithSetup({ + generator, + startSetup, + parentRunId, + handlers, + }); const setup = await gen.setup; return { output: to(gen, setup, ...args), setup }; } From 7e2c7036b91b05e42775336a1afd3fe04ccd0648 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sat, 6 Jul 2024 23:41:13 -0700 Subject: [PATCH 02/31] Fix parent run tracking when streaming runnable lambdas --- langchain-core/src/runnables/base.ts | 21 ++++++++++--------- .../tests/async_local_storage.test.ts | 14 ++++++++++++- 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index a2eea53ce474..4a0fe2e55e5c 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -2318,15 +2318,20 @@ export class RunnableLambda extends Runnable< } } } + const childConfig = patchConfig(config, { + callbacks: runManager?.getChild(), + recursionLimit: + (config?.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1, + }); const output = await new Promise( (resolve, reject) => { void AsyncLocalStorageProviderSingleton.getInstance().run( - config, + childConfig, async () => { try { const res = await this.func(finalChunk as RunInput, { - ...config, - config, + ...childConfig, + config: childConfig, }); resolve(res); } catch (e) { @@ -2342,21 +2347,17 @@ export class RunnableLambda extends Runnable< } const stream = await output.stream( finalChunk as RunInput, - patchConfig(config, { - callbacks: runManager?.getChild(), - recursionLimit: - (config?.recursionLimit ?? 
DEFAULT_RECURSION_LIMIT) - 1, - }) + childConfig ); for await (const chunk of stream) { yield chunk; } } else if (isAsyncIterable(output)) { - for await (const chunk of consumeAsyncIterableInContext(config, output)) { + for await (const chunk of consumeAsyncIterableInContext(childConfig, output)) { yield chunk as RunOutput; } } else if (isIterableIterator(output)) { - for (const chunk of consumeIteratorInContext(config, output)) { + for (const chunk of consumeIteratorInContext(childConfig, output)) { yield chunk as RunOutput; } } else { diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index 850443f4d520..3245e2af4206 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -1,8 +1,10 @@ import { test, expect } from "@jest/globals"; +import { v4 } from "uuid"; import { AsyncLocalStorage } from "node:async_hooks"; import { AsyncLocalStorageProviderSingleton } from "../index.js"; import { RunnableLambda } from "../../runnables/base.js"; import { FakeListChatModel } from "../../utils/testing/index.js"; +import { getCallbackManagerForConfig } from "../../runnables/config.js"; test("Config should be automatically populated after setting global async local storage", async () => { const inner = RunnableLambda.from((_, config) => config); @@ -136,10 +138,20 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a AsyncLocalStorageProviderSingleton.initializeGlobalInstance( new AsyncLocalStorage() ); + const asyncLocalStorage = AsyncLocalStorageProviderSingleton.getInstance(); const chat = new FakeListChatModel({ responses: ["Hello"], }); + const outerRunId = v4(); + const innerRunId = v4(); const myFunc = async (input: string) => { + const outerCallbackManager = await getCallbackManagerForConfig(asyncLocalStorage.getStore()); + expect(outerCallbackManager?.getParentRunId()).toEqual(outerRunId); + const nested2 = RunnableLambda.from(async (_: string) => { + const innerCallbackManager = await getCallbackManagerForConfig(asyncLocalStorage.getStore()); + expect(innerCallbackManager?.getParentRunId()).toEqual(innerRunId); + }); + await nested2.stream(input, { runId: innerRunId }); for await (const _ of await chat.stream(input)) { // no-op } @@ -150,8 +162,8 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a const events = []; for await (const event of myNestedLambda.streamEvents("hello", { version: "v1", + runId: outerRunId, })) { - console.log(event); events.push(event); } const chatModelStreamEvent = events.find((event) => { From 9c33b0c1bc2cfeb4d0f45219e1b543f151035abb Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sat, 6 Jul 2024 23:43:01 -0700 Subject: [PATCH 03/31] lint --- langchain-core/src/runnables/base.ts | 13 ++++++------- .../singletons/tests/async_local_storage.test.ts | 8 ++++++-- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index 4a0fe2e55e5c..b6dd32ce5052 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -2320,8 +2320,7 @@ export class RunnableLambda extends Runnable< } const childConfig = patchConfig(config, { callbacks: runManager?.getChild(), - recursionLimit: - (config?.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1, + recursionLimit: (config?.recursionLimit ?? 
DEFAULT_RECURSION_LIMIT) - 1, }); const output = await new Promise( (resolve, reject) => { @@ -2345,15 +2344,15 @@ export class RunnableLambda extends Runnable< if (config?.recursionLimit === 0) { throw new Error("Recursion limit reached."); } - const stream = await output.stream( - finalChunk as RunInput, - childConfig - ); + const stream = await output.stream(finalChunk as RunInput, childConfig); for await (const chunk of stream) { yield chunk; } } else if (isAsyncIterable(output)) { - for await (const chunk of consumeAsyncIterableInContext(childConfig, output)) { + for await (const chunk of consumeAsyncIterableInContext( + childConfig, + output + )) { yield chunk as RunOutput; } } else if (isIterableIterator(output)) { diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index 3245e2af4206..dab6fedffe2a 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -145,10 +145,14 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a const outerRunId = v4(); const innerRunId = v4(); const myFunc = async (input: string) => { - const outerCallbackManager = await getCallbackManagerForConfig(asyncLocalStorage.getStore()); + const outerCallbackManager = await getCallbackManagerForConfig( + asyncLocalStorage.getStore() + ); expect(outerCallbackManager?.getParentRunId()).toEqual(outerRunId); const nested2 = RunnableLambda.from(async (_: string) => { - const innerCallbackManager = await getCallbackManagerForConfig(asyncLocalStorage.getStore()); + const innerCallbackManager = await getCallbackManagerForConfig( + asyncLocalStorage.getStore() + ); expect(innerCallbackManager?.getParentRunId()).toEqual(innerRunId); }); await nested2.stream(input, { runId: innerRunId }); From 73a0698cb4faf4c40d56081bf15b5dd5806ff1f2 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 7 Jul 2024 01:00:13 -0700 Subject: [PATCH 04/31] Clean up --- langchain-core/src/runnables/base.ts | 6 ----- langchain-core/src/singletons/index.ts | 22 +++++-------------- .../tests/async_local_storage.test.ts | 7 ++---- langchain-core/src/tracers/base.ts | 1 - langchain-core/src/utils/stream.ts | 1 - 5 files changed, 8 insertions(+), 29 deletions(-) diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index 3f23b8a7adb9..d0a35cc504df 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -308,12 +308,6 @@ export abstract class Runnable< // to surface immediately. 
const config = ensureConfig(options); const callbackManager = await getCallbackManagerForConfig(config); - console.log( - "ABOUT TO STREAM", - config, - callbackManager?.getParentRunId(), - callbackManager?.handlers - ); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this._streamIterator(input, config), config, diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index 32beb6d9b444..dd110a2a5172 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ -import { type Client, RunTree } from "langsmith"; +import { RunTree } from "langsmith"; import { isTracingEnabled } from "../utils/callbacks.js"; export interface AsyncLocalStorageInterface { @@ -49,21 +49,13 @@ class AsyncLocalStorageProvider { _lc_runnable_config: config, }; } else { - let parentRun: RunTree | undefined; - let projectName: string | undefined; - let client: Client | undefined; - - let tracingEnabled = isTracingEnabled(); - - console.log("PARENT RUN ID", parentRunId); const langChainTracer = handlers?.find( - (handler: any) => handler?.name == "langchain_tracer" + (handler: any) => handler?.name === "langchain_tracer" ); - - parentRun = langChainTracer?.getRun?.(parentRunId); - projectName = langChainTracer?.projectName; - client = langChainTracer?.client; - tracingEnabled = tracingEnabled || !!langChainTracer; + const tracingEnabled = isTracingEnabled() || !!langChainTracer; + const parentRun = langChainTracer?.getRun?.(parentRunId); + const projectName = langChainTracer?.projectName; + const client = langChainTracer?.client; newRunTree = new RunTree({ client, tracingEnabled, @@ -77,8 +69,6 @@ class AsyncLocalStorageProvider { }, }); } - // return storage.run({ extra: { _lc_runnable_config: config }}, callback); - // console.log("NEW RUN TREE", newRunTree); return storage.run(newRunTree, callback); } diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index 585c06da2e89..9533cb1222be 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -134,18 +134,17 @@ test("Config should be automatically populated after setting global async local ).toEqual(1); }); -test.only("Runnable streamEvents method with streaming nested in a RunnableLambda", async () => { +test("Runnable streamEvents method with streaming nested in a RunnableLambda", async () => { AsyncLocalStorageProviderSingleton.initializeGlobalInstance( new AsyncLocalStorage() ); - const asyncLocalStorage = AsyncLocalStorageProviderSingleton.getInstance(); const chat = new FakeListChatModel({ responses: ["Hello"], }); const outerRunId = v4(); const myFunc = async (input: string) => { const outerCallbackManager = await getCallbackManagerForConfig( - asyncLocalStorage.getStore() + AsyncLocalStorageProviderSingleton.getRunnableConfig() ); expect(outerCallbackManager?.getParentRunId()).toEqual(outerRunId); for await (const _ of await chat.stream(input)) { @@ -155,8 +154,6 @@ test.only("Runnable streamEvents method with streaming nested in a RunnableLambd const myNestedLambda = RunnableLambda.from(myFunc); - // console.log(await myNestedLambda.invoke("hello")); - const events = []; for await (const event of myNestedLambda.streamEvents("hello", { version: "v1", diff --git a/langchain-core/src/tracers/base.ts 
b/langchain-core/src/tracers/base.ts index a787d00dd51f..dcd3291a4a57 100644 --- a/langchain-core/src/tracers/base.ts +++ b/langchain-core/src/tracers/base.ts @@ -91,7 +91,6 @@ export abstract class BaseTracer extends BaseCallbackHandler { } protected async _startTrace(run: Run) { - console.log("STARTING RUN", run, this.runMap); const currentDottedOrder = convertToDottedOrderFormat( run.start_time, run.id, diff --git a/langchain-core/src/utils/stream.ts b/langchain-core/src/utils/stream.ts index 3e2a59b1b710..16485fd12bb2 100644 --- a/langchain-core/src/utils/stream.ts +++ b/langchain-core/src/utils/stream.ts @@ -206,7 +206,6 @@ export class AsyncGeneratorWithSetup< this.config = params.config; this.parentRunId = params.parentRunId; this.handlers = params.handlers; - console.log("IN SETUP", this.parentRunId, this.handlers); // setup is a promise that resolves only after the first iterator value // is available. this is useful when setup of several piped generators // needs to happen in logical order, ie. in the order in which input to From abe4d2a8d37086b2520847296655a26533ea48a3 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 7 Jul 2024 01:02:40 -0700 Subject: [PATCH 05/31] Naming --- langchain-core/src/runnables/base.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index d0a35cc504df..2c4cde0e6bb2 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -2453,11 +2453,11 @@ export class RunnableWithFallbacks extends Runnable< async invoke( input: RunInput, - config?: Partial + options?: Partial ): Promise { - const options = ensureConfig(config); + const config = ensureConfig(options); const callbackManager_ = await getCallbackManagerForConfig(options); - const { runId, ...otherOptions } = options ?? 
{}; + const { runId, ...otherConfigFields } = config; const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -2465,14 +2465,14 @@ export class RunnableWithFallbacks extends Runnable< undefined, undefined, undefined, - otherOptions?.runName + otherConfigFields?.runName ); let firstError; for (const runnable of this.runnables()) { try { const output = await runnable.invoke( input, - patchConfig(otherOptions, { callbacks: runManager?.getChild() }) + patchConfig(otherConfigFields, { callbacks: runManager?.getChild() }) ); await runManager?.handleChainEnd(_coerceToDict(output, "output")); return output; From d8dc43bd1d00ec864b1e9ab808e85a163bab7298 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 7 Jul 2024 01:37:05 -0700 Subject: [PATCH 06/31] Clean up --- langchain-core/src/callbacks/manager.ts | 24 +++++++++- langchain-core/src/runnables/base.ts | 41 ++++------------- langchain-core/src/runnables/branch.ts | 2 +- langchain-core/src/runnables/config.ts | 44 ++++++------------- langchain-core/src/runnables/iter.ts | 5 --- langchain-core/src/runnables/remote.ts | 6 +-- langchain-core/src/runnables/types.ts | 19 +++++++- langchain-core/src/singletons/index.ts | 21 +++++---- .../tests/async_local_storage.test.ts | 2 +- langchain-core/src/tracers/initialize.ts | 10 ----- langchain-core/src/utils/stream.ts | 21 ++------- libs/langchain-scripts/src/_data/importMap.ts | 5 --- 12 files changed, 82 insertions(+), 118 deletions(-) diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 8c21651913c3..65f5c2636ac2 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -9,7 +9,6 @@ import { NewTokenIndices, } from "./base.js"; import { ConsoleCallbackHandler } from "../tracers/console.js"; -import { getTracingV2CallbackHandler } from "../tracers/initialize.js"; import { type BaseMessage } from "../messages/base.js"; import { getBufferString } from "../messages/utils.js"; import { getEnvironmentVariable } from "../utils/env.js"; @@ -967,6 +966,27 @@ export class CallbackManager localMetadata?: Record, options?: CallbackManagerOptions ): Promise { + return this._configureSync( + inheritableHandlers, + localHandlers, + inheritableTags, + localTags, + inheritableMetadata, + localMetadata, + options + ); + } + + // TODO: Deprecate async method in favor of this one. + static _configureSync( + inheritableHandlers?: Callbacks, + localHandlers?: Callbacks, + inheritableTags?: string[], + localTags?: string[], + inheritableMetadata?: Record, + localMetadata?: Record, + options?: CallbackManagerOptions + ) { let callbackManager: CallbackManager | undefined; if (inheritableHandlers || localHandlers) { if (Array.isArray(inheritableHandlers) || !inheritableHandlers) { @@ -1014,7 +1034,7 @@ export class CallbackManager ) ) { if (tracingV2Enabled) { - const tracerV2 = await getTracingV2CallbackHandler(); + const tracerV2 = new LangChainTracer(); callbackManager.addHandler(tracerV2, true); // handoff between langchain and langsmith/traceable diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index 2c4cde0e6bb2..52ca00841d34 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -307,12 +307,9 @@ export abstract class Runnable< // Buffer the first streamed chunk to allow for initial errors // to surface immediately. 
const config = ensureConfig(options); - const callbackManager = await getCallbackManagerForConfig(config); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this._streamIterator(input, config), config, - parentRunId: callbackManager?.getParentRunId(), - handlers: callbackManager?.handlers, }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); @@ -360,7 +357,7 @@ export abstract class Runnable< options?: Partial & { runType?: string } ) { const config = ensureConfig(options); - const callbackManager_ = await getCallbackManagerForConfig(config); + const callbackManager_ = getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -469,7 +466,7 @@ export abstract class Runnable< let finalOutputSupported = true; const config = ensureConfig(options); - const callbackManager_ = await getCallbackManagerForConfig(config); + const callbackManager_ = getCallbackManagerForConfig(config); async function* wrapInputForTracing() { for await (const chunk of inputGenerator) { if (finalInputSupported) { @@ -504,8 +501,6 @@ export abstract class Runnable< undefined, config.runName ?? this.getName() ), - callbackManager_?.getParentRunId(), - callbackManager_?.handlers, config ); delete config.runId; @@ -1665,7 +1660,7 @@ export class RunnableSequence< async invoke(input: RunInput, options?: RunnableConfig): Promise { const config = ensureConfig(options); - const callbackManager_ = await getCallbackManagerForConfig(config); + const callbackManager_ = getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -1778,7 +1773,7 @@ export class RunnableSequence< input: RunInput, options?: RunnableConfig ): AsyncGenerator { - const callbackManager_ = await getCallbackManagerForConfig(options); + const callbackManager_ = getCallbackManagerForConfig(options); const { runId, ...otherOptions } = options ?? {}; const runManager = await callbackManager_?.handleChainStart( this.toJSON(), @@ -1974,7 +1969,7 @@ export class RunnableMap< options?: Partial ): Promise { const config = ensureConfig(options); - const callbackManager_ = await getCallbackManagerForConfig(config); + const callbackManager_ = getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), { @@ -2103,7 +2098,7 @@ export class RunnableTraceable extends Runnable< async invoke(input: RunInput, options?: Partial) { const [config] = this._getOptionsList(options ?? {}, 1); - const callbacks = await getCallbackManagerForConfig(config); + const callbacks = getCallbackManagerForConfig(config); return (await this.func( patchConfig(config, { callbacks }), @@ -2229,13 +2224,8 @@ export class RunnableLambda extends Runnable< callbacks: runManager?.getChild(), recursionLimit: (config?.recursionLimit ?? 
DEFAULT_RECURSION_LIMIT) - 1, }); - const tracingData = { - parentRunId: runManager?.parentRunId, - handlers: runManager?.handlers, - }; void AsyncLocalStorageProviderSingleton.runWithConfig( childConfig, - tracingData, async () => { try { let output = await this.func(input, { @@ -2255,7 +2245,6 @@ export class RunnableLambda extends Runnable< let finalOutput: RunOutput | undefined; for await (const chunk of consumeAsyncIterableInContext( childConfig, - tracingData, output )) { if (finalOutput === undefined) { @@ -2275,7 +2264,6 @@ export class RunnableLambda extends Runnable< let finalOutput: RunOutput | undefined; for (const chunk of consumeIteratorInContext( childConfig, - tracingData, output )) { if (finalOutput === undefined) { @@ -2331,15 +2319,10 @@ export class RunnableLambda extends Runnable< callbacks: runManager?.getChild(), recursionLimit: (config?.recursionLimit ?? DEFAULT_RECURSION_LIMIT) - 1, }); - const tracingData = { - parentRunId: runManager?.parentRunId, - handlers: runManager?.handlers, - }; const output = await new Promise( (resolve, reject) => { void AsyncLocalStorageProviderSingleton.runWithConfig( childConfig, - tracingData, async () => { try { const res = await this.func(finalChunk as RunInput, { @@ -2365,17 +2348,12 @@ export class RunnableLambda extends Runnable< } else if (isAsyncIterable(output)) { for await (const chunk of consumeAsyncIterableInContext( childConfig, - tracingData, output )) { yield chunk as RunOutput; } } else if (isIterableIterator(output)) { - for (const chunk of consumeIteratorInContext( - childConfig, - tracingData, - output - )) { + for (const chunk of consumeIteratorInContext(childConfig, output)) { yield chunk as RunOutput; } } else { @@ -2402,12 +2380,9 @@ export class RunnableLambda extends Runnable< yield input; } const config = ensureConfig(options); - const callbackManager = await getCallbackManagerForConfig(config); const wrappedGenerator = new AsyncGeneratorWithSetup({ generator: this.transform(generator(), config), config, - parentRunId: callbackManager?.getParentRunId(), - handlers: callbackManager?.handlers, }); await wrappedGenerator.setup; return IterableReadableStream.fromAsyncGenerator(wrappedGenerator); @@ -2456,7 +2431,7 @@ export class RunnableWithFallbacks extends Runnable< options?: Partial ): Promise { const config = ensureConfig(options); - const callbackManager_ = await getCallbackManagerForConfig(options); + const callbackManager_ = getCallbackManagerForConfig(options); const { runId, ...otherConfigFields } = config; const runManager = await callbackManager_?.handleChainStart( this.toJSON(), diff --git a/langchain-core/src/runnables/branch.ts b/langchain-core/src/runnables/branch.ts index c9670aa54976..7585a744746d 100644 --- a/langchain-core/src/runnables/branch.ts +++ b/langchain-core/src/runnables/branch.ts @@ -186,7 +186,7 @@ export class RunnableBranch extends Runnable< } async *_streamIterator(input: RunInput, config?: Partial) { - const callbackManager_ = await getCallbackManagerForConfig(config); + const callbackManager_ = getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), diff --git a/langchain-core/src/runnables/config.ts b/langchain-core/src/runnables/config.ts index 582363016cf1..fb92e1338d72 100644 --- a/langchain-core/src/runnables/config.ts +++ b/langchain-core/src/runnables/config.ts @@ -1,38 +1,10 @@ -import { - type BaseCallbackConfig, - CallbackManager, - ensureHandler, -} from 
"../callbacks/manager.js"; +import { CallbackManager, ensureHandler } from "../callbacks/manager.js"; import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; +import { RunnableConfig } from "./types.js"; export const DEFAULT_RECURSION_LIMIT = 25; -export interface RunnableConfig extends BaseCallbackConfig { - /** - * Runtime values for attributes previously made configurable on this Runnable, - * or sub-Runnables. - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - configurable?: Record; - - /** - * Maximum number of times a call can recurse. If not provided, defaults to 25. - */ - recursionLimit?: number; - - /** Maximum number of parallel calls to make. */ - maxConcurrency?: number; -} - -export async function getCallbackManagerForConfig(config?: RunnableConfig) { - return CallbackManager.configure( - config?.callbacks, - undefined, - config?.tags, - undefined, - config?.metadata - ); -} +export { type RunnableConfig }; export function mergeConfigs( ...configs: (CallOptions | RunnableConfig | undefined | null)[] @@ -194,3 +166,13 @@ export function patchConfig( } return newConfig; } + +export function getCallbackManagerForConfig(config?: RunnableConfig) { + return CallbackManager._configureSync( + config?.callbacks, + undefined, + config?.tags, + undefined, + config?.metadata + ); +} diff --git a/langchain-core/src/runnables/iter.ts b/langchain-core/src/runnables/iter.ts index e40691b08e11..32434a648399 100644 --- a/langchain-core/src/runnables/iter.ts +++ b/langchain-core/src/runnables/iter.ts @@ -1,4 +1,3 @@ -import { BaseCallbackHandler } from "../callbacks/base.js"; import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; import { RunnableConfig } from "./config.js"; @@ -33,13 +32,11 @@ export function isAsyncIterable( export function* consumeIteratorInContext( context: Partial | undefined, - options: { parentRunId?: string; handlers?: BaseCallbackHandler[] }, iter: IterableIterator ): IterableIterator { while (true) { const { value, done } = AsyncLocalStorageProviderSingleton.runWithConfig( context, - options, iter.next.bind(iter) ); if (done) { @@ -52,7 +49,6 @@ export function* consumeIteratorInContext( export async function* consumeAsyncIterableInContext( context: Partial | undefined, - options: { parentRunId?: string; handlers?: BaseCallbackHandler[] }, iter: AsyncIterable ): AsyncIterableIterator { const iterator = iter[Symbol.asyncIterator](); @@ -60,7 +56,6 @@ export async function* consumeAsyncIterableInContext( const { value, done } = await AsyncLocalStorageProviderSingleton.runWithConfig( context, - options, iterator.next.bind(iter) ); if (done) { diff --git a/langchain-core/src/runnables/remote.ts b/langchain-core/src/runnables/remote.ts index bf221e1014e3..0ba06e0896d8 100644 --- a/langchain-core/src/runnables/remote.ts +++ b/langchain-core/src/runnables/remote.ts @@ -402,7 +402,7 @@ export class RemoteRunnable< ): AsyncGenerator { const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options); - const callbackManager_ = await getCallbackManagerForConfig(options); + const callbackManager_ = getCallbackManagerForConfig(options); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -473,7 +473,7 @@ export class RemoteRunnable< ): AsyncGenerator { const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options); - const callbackManager_ = await getCallbackManagerForConfig(options); + const callbackManager_ = 
getCallbackManagerForConfig(options); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -545,7 +545,7 @@ export class RemoteRunnable< const generator = async function* () { const [config, kwargs] = outerThis._separateRunnableConfigFromCallOptions(options); - const callbackManager_ = await getCallbackManagerForConfig(options); + const callbackManager_ = getCallbackManagerForConfig(options); const runManager = await callbackManager_?.handleChainStart( outerThis.toJSON(), _coerceToDict(input, "input"), diff --git a/langchain-core/src/runnables/types.ts b/langchain-core/src/runnables/types.ts index 0050a955f5b8..569e8aa26c0e 100644 --- a/langchain-core/src/runnables/types.ts +++ b/langchain-core/src/runnables/types.ts @@ -1,7 +1,7 @@ import type { z } from "zod"; -import type { RunnableConfig } from "./config.js"; import type { IterableReadableStreamInterface } from "../utils/stream.js"; import type { SerializableInterface } from "../load/serializable.js"; +import type { BaseCallbackConfig } from "../callbacks/manager.js"; export type RunnableBatchOptions = { /** @deprecated Pass in via the standard runnable config object instead */ @@ -73,3 +73,20 @@ export interface Node { id: string; data: RunnableIOSchema | RunnableInterface; } + +export interface RunnableConfig extends BaseCallbackConfig { + /** + * Runtime values for attributes previously made configurable on this Runnable, + * or sub-Runnables. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + configurable?: Record; + + /** + * Maximum number of times a call can recurse. If not provided, defaults to 25. + */ + recursionLimit?: number; + + /** Maximum number of parallel calls to make. */ + maxConcurrency?: number; +} diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index dd110a2a5172..a1f56323d924 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -1,6 +1,7 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import { RunTree } from "langsmith"; import { isTracingEnabled } from "../utils/callbacks.js"; +import { CallbackManager } from "../callbacks/manager.js"; export interface AsyncLocalStorageInterface { getStore: () => any | undefined; @@ -33,12 +34,16 @@ class AsyncLocalStorageProvider { return storage.getStore()?.extra?._lc_runnable_config; } - runWithConfig( - config: any, - options: { parentRunId?: string; handlers?: any[] }, - callback: () => T - ): T { - const { parentRunId, handlers } = options; + runWithConfig(config: any, callback: () => T): T { + const callbackManager = CallbackManager._configureSync( + config?.callbacks, + undefined, + config?.tags, + undefined, + config?.metadata + ); + const parentRunId = callbackManager?.getParentRunId(); + const handlers = callbackManager?.handlers; const storage = this.getInstance(); const currentRunTree = storage.getStore(); let newRunTree; @@ -49,8 +54,8 @@ class AsyncLocalStorageProvider { _lc_runnable_config: config, }; } else { - const langChainTracer = handlers?.find( - (handler: any) => handler?.name === "langchain_tracer" + const langChainTracer: any = handlers?.find( + (handler) => handler?.name === "langchain_tracer" ); const tracingEnabled = isTracingEnabled() || !!langChainTracer; const parentRun = langChainTracer?.getRun?.(parentRunId); diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index 
9533cb1222be..100a4f0467ac 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -143,7 +143,7 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a }); const outerRunId = v4(); const myFunc = async (input: string) => { - const outerCallbackManager = await getCallbackManagerForConfig( + const outerCallbackManager = getCallbackManagerForConfig( AsyncLocalStorageProviderSingleton.getRunnableConfig() ); expect(outerCallbackManager?.getParentRunId()).toEqual(outerRunId); diff --git a/langchain-core/src/tracers/initialize.ts b/langchain-core/src/tracers/initialize.ts index e956b5075aa1..3fcdfa36311e 100644 --- a/langchain-core/src/tracers/initialize.ts +++ b/langchain-core/src/tracers/initialize.ts @@ -1,4 +1,3 @@ -import { LangChainTracer } from "./tracer_langchain.js"; import { LangChainTracerV1 } from "./tracer_langchain_v1.js"; /** @@ -21,12 +20,3 @@ export async function getTracingCallbackHandler( } return tracer; } - -/** - * Function that returns an instance of `LangChainTracer`. It does not - * load any session data. - * @returns An instance of `LangChainTracer`. - */ -export async function getTracingV2CallbackHandler(): Promise { - return new LangChainTracer(); -} diff --git a/langchain-core/src/utils/stream.ts b/langchain-core/src/utils/stream.ts index 16485fd12bb2..beb656b620a5 100644 --- a/langchain-core/src/utils/stream.ts +++ b/langchain-core/src/utils/stream.ts @@ -1,9 +1,8 @@ -// Make this a type to override ReadableStream's async iterator type in case -// the popular web-streams-polyfill is imported - the supplied types -import { BaseCallbackHandler } from "../callbacks/base.js"; import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; -// in this case don't quite match. +// Make this a type to override ReadableStream's async iterator type in case +// the popular web-streams-polyfill is imported - the supplied types +// in that case don't quite match. export type IterableReadableStreamInterface = ReadableStream & AsyncIterable; @@ -187,10 +186,6 @@ export class AsyncGeneratorWithSetup< public config?: unknown; - private parentRunId?: string; - - private handlers?: BaseCallbackHandler[]; - private firstResult: Promise>; private firstResultUsed = false; @@ -199,13 +194,9 @@ export class AsyncGeneratorWithSetup< generator: AsyncGenerator; startSetup?: () => Promise; config?: unknown; - parentRunId?: string; - handlers?: BaseCallbackHandler[]; }) { this.generator = params.generator; this.config = params.config; - this.parentRunId = params.parentRunId; - this.handlers = params.handlers; // setup is a promise that resolves only after the first iterator value // is available. this is useful when setup of several piped generators // needs to happen in logical order, ie. 
in the order in which input to @@ -213,7 +204,6 @@ export class AsyncGeneratorWithSetup< this.setup = new Promise((resolve, reject) => { void AsyncLocalStorageProviderSingleton.runWithConfig( params.config, - { parentRunId: this.parentRunId, handlers: this.handlers }, async () => { this.firstResult = params.generator.next(); if (params.startSetup) { @@ -234,7 +224,6 @@ export class AsyncGeneratorWithSetup< return AsyncLocalStorageProviderSingleton.runWithConfig( this.config, - { parentRunId: this.parentRunId, handlers: this.handlers }, async () => { return this.generator.next(...args); } @@ -273,15 +262,11 @@ export async function pipeGeneratorWithSetup< ) => AsyncGenerator, generator: AsyncGenerator, startSetup: () => Promise, - parentRunId?: string, - handlers?: BaseCallbackHandler[], ...args: A ) { const gen = new AsyncGeneratorWithSetup({ generator, startSetup, - parentRunId, - handlers, }); const setup = await gen.setup; return { output: to(gen, setup, ...args), setup }; diff --git a/libs/langchain-scripts/src/_data/importMap.ts b/libs/langchain-scripts/src/_data/importMap.ts index 1bbb79ef46e9..c21396575068 100644 --- a/libs/langchain-scripts/src/_data/importMap.ts +++ b/libs/langchain-scripts/src/_data/importMap.ts @@ -625,11 +625,6 @@ export const importMap: Array = [ new: "@langchain/core/tracers/initialize", namedImport: "getTracingCallbackHandler", }, - { - old: "langchain/callbacks/*", - new: "@langchain/core/tracers/initialize", - namedImport: "getTracingV2CallbackHandler", - }, { old: "langchain/callbacks/*", new: "@langchain/core/tracers/console", From 3b074b7f10d4e7fdbd1c31a4c4c82d5bde8a190f Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 7 Jul 2024 01:40:52 -0700 Subject: [PATCH 07/31] Reduce changes --- langchain-core/src/runnables/base.ts | 14 +++++++------- langchain-core/src/runnables/branch.ts | 2 +- langchain-core/src/runnables/config.ts | 2 +- langchain-core/src/runnables/remote.ts | 6 +++--- .../singletons/tests/async_local_storage.test.ts | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/langchain-core/src/runnables/base.ts b/langchain-core/src/runnables/base.ts index 52ca00841d34..f16139a0d31f 100644 --- a/langchain-core/src/runnables/base.ts +++ b/langchain-core/src/runnables/base.ts @@ -357,7 +357,7 @@ export abstract class Runnable< options?: Partial & { runType?: string } ) { const config = ensureConfig(options); - const callbackManager_ = getCallbackManagerForConfig(config); + const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -466,7 +466,7 @@ export abstract class Runnable< let finalOutputSupported = true; const config = ensureConfig(options); - const callbackManager_ = getCallbackManagerForConfig(config); + const callbackManager_ = await getCallbackManagerForConfig(config); async function* wrapInputForTracing() { for await (const chunk of inputGenerator) { if (finalInputSupported) { @@ -1660,7 +1660,7 @@ export class RunnableSequence< async invoke(input: RunInput, options?: RunnableConfig): Promise { const config = ensureConfig(options); - const callbackManager_ = getCallbackManagerForConfig(config); + const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -1773,7 +1773,7 @@ export class RunnableSequence< input: RunInput, options?: RunnableConfig ): AsyncGenerator { - const 
callbackManager_ = getCallbackManagerForConfig(options); + const callbackManager_ = await getCallbackManagerForConfig(options); const { runId, ...otherOptions } = options ?? {}; const runManager = await callbackManager_?.handleChainStart( this.toJSON(), @@ -1969,7 +1969,7 @@ export class RunnableMap< options?: Partial ): Promise { const config = ensureConfig(options); - const callbackManager_ = getCallbackManagerForConfig(config); + const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), { @@ -2098,7 +2098,7 @@ export class RunnableTraceable extends Runnable< async invoke(input: RunInput, options?: Partial) { const [config] = this._getOptionsList(options ?? {}, 1); - const callbacks = getCallbackManagerForConfig(config); + const callbacks = await getCallbackManagerForConfig(config); return (await this.func( patchConfig(config, { callbacks }), @@ -2431,7 +2431,7 @@ export class RunnableWithFallbacks extends Runnable< options?: Partial ): Promise { const config = ensureConfig(options); - const callbackManager_ = getCallbackManagerForConfig(options); + const callbackManager_ = await getCallbackManagerForConfig(options); const { runId, ...otherConfigFields } = config; const runManager = await callbackManager_?.handleChainStart( this.toJSON(), diff --git a/langchain-core/src/runnables/branch.ts b/langchain-core/src/runnables/branch.ts index 7585a744746d..c9670aa54976 100644 --- a/langchain-core/src/runnables/branch.ts +++ b/langchain-core/src/runnables/branch.ts @@ -186,7 +186,7 @@ export class RunnableBranch extends Runnable< } async *_streamIterator(input: RunInput, config?: Partial) { - const callbackManager_ = getCallbackManagerForConfig(config); + const callbackManager_ = await getCallbackManagerForConfig(config); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), diff --git a/langchain-core/src/runnables/config.ts b/langchain-core/src/runnables/config.ts index fb92e1338d72..f2a1bb412ba2 100644 --- a/langchain-core/src/runnables/config.ts +++ b/langchain-core/src/runnables/config.ts @@ -167,7 +167,7 @@ export function patchConfig( return newConfig; } -export function getCallbackManagerForConfig(config?: RunnableConfig) { +export async function getCallbackManagerForConfig(config?: RunnableConfig) { return CallbackManager._configureSync( config?.callbacks, undefined, diff --git a/langchain-core/src/runnables/remote.ts b/langchain-core/src/runnables/remote.ts index 0ba06e0896d8..bf221e1014e3 100644 --- a/langchain-core/src/runnables/remote.ts +++ b/langchain-core/src/runnables/remote.ts @@ -402,7 +402,7 @@ export class RemoteRunnable< ): AsyncGenerator { const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options); - const callbackManager_ = getCallbackManagerForConfig(options); + const callbackManager_ = await getCallbackManagerForConfig(options); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -473,7 +473,7 @@ export class RemoteRunnable< ): AsyncGenerator { const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options); - const callbackManager_ = getCallbackManagerForConfig(options); + const callbackManager_ = await getCallbackManagerForConfig(options); const runManager = await callbackManager_?.handleChainStart( this.toJSON(), _coerceToDict(input, "input"), @@ -545,7 +545,7 @@ export class RemoteRunnable< const generator = async function* () { const 
[config, kwargs] = outerThis._separateRunnableConfigFromCallOptions(options); - const callbackManager_ = getCallbackManagerForConfig(options); + const callbackManager_ = await getCallbackManagerForConfig(options); const runManager = await callbackManager_?.handleChainStart( outerThis.toJSON(), _coerceToDict(input, "input"), diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index 100a4f0467ac..9533cb1222be 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -143,7 +143,7 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a }); const outerRunId = v4(); const myFunc = async (input: string) => { - const outerCallbackManager = getCallbackManagerForConfig( + const outerCallbackManager = await getCallbackManagerForConfig( AsyncLocalStorageProviderSingleton.getRunnableConfig() ); expect(outerCallbackManager?.getParentRunId()).toEqual(outerRunId); From 9d6c1936513fee251f95329c80e84dda84d550d5 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 7 Jul 2024 01:45:52 -0700 Subject: [PATCH 08/31] Reduce diff --- langchain-core/src/runnables/config.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/langchain-core/src/runnables/config.ts b/langchain-core/src/runnables/config.ts index f2a1bb412ba2..56486b2dd1f5 100644 --- a/langchain-core/src/runnables/config.ts +++ b/langchain-core/src/runnables/config.ts @@ -6,6 +6,16 @@ export const DEFAULT_RECURSION_LIMIT = 25; export { type RunnableConfig }; +export async function getCallbackManagerForConfig(config?: RunnableConfig) { + return CallbackManager._configureSync( + config?.callbacks, + undefined, + config?.tags, + undefined, + config?.metadata + ); +} + export function mergeConfigs( ...configs: (CallOptions | RunnableConfig | undefined | null)[] ): Partial { @@ -166,13 +176,3 @@ export function patchConfig( } return newConfig; } - -export async function getCallbackManagerForConfig(config?: RunnableConfig) { - return CallbackManager._configureSync( - config?.callbacks, - undefined, - config?.tags, - undefined, - config?.metadata - ); -} From 07c24f6c6d5280b8a23290dd3433864b7d58b7af Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 7 Jul 2024 01:57:01 -0700 Subject: [PATCH 09/31] Readd exposed method --- langchain-core/src/tracers/initialize.ts | 12 ++++++++++++ libs/langchain-scripts/src/_data/importMap.ts | 5 +++++ 2 files changed, 17 insertions(+) diff --git a/langchain-core/src/tracers/initialize.ts b/langchain-core/src/tracers/initialize.ts index 3fcdfa36311e..fa55fd2f36a6 100644 --- a/langchain-core/src/tracers/initialize.ts +++ b/langchain-core/src/tracers/initialize.ts @@ -1,3 +1,4 @@ +import { LangChainTracer } from "./tracer_langchain.js"; import { LangChainTracerV1 } from "./tracer_langchain_v1.js"; /** @@ -20,3 +21,14 @@ export async function getTracingCallbackHandler( } return tracer; } + +/** + * @deprecated Instantiate directly using the LangChainTracer constructor. + * + * Function that returns an instance of `LangChainTracer`. It does not + * load any session data. + * @returns An instance of `LangChainTracer`. 
+ */ +export async function getTracingV2CallbackHandler(): Promise { + return new LangChainTracer(); +} diff --git a/libs/langchain-scripts/src/_data/importMap.ts b/libs/langchain-scripts/src/_data/importMap.ts index c21396575068..1bbb79ef46e9 100644 --- a/libs/langchain-scripts/src/_data/importMap.ts +++ b/libs/langchain-scripts/src/_data/importMap.ts @@ -625,6 +625,11 @@ export const importMap: Array = [ new: "@langchain/core/tracers/initialize", namedImport: "getTracingCallbackHandler", }, + { + old: "langchain/callbacks/*", + new: "@langchain/core/tracers/initialize", + namedImport: "getTracingV2CallbackHandler", + }, { old: "langchain/callbacks/*", new: "@langchain/core/tracers/console", From 528cbe1572e3c63584f5a894f8692e83650d392d Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Mon, 15 Jul 2024 16:30:04 -0700 Subject: [PATCH 10/31] Merge config --- langchain-core/src/runnables/config.ts | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/langchain-core/src/runnables/config.ts b/langchain-core/src/runnables/config.ts index 56486b2dd1f5..eb476ae6b338 100644 --- a/langchain-core/src/runnables/config.ts +++ b/langchain-core/src/runnables/config.ts @@ -101,15 +101,18 @@ const PRIMITIVES = new Set(["string", "number", "boolean"]); /** * Ensure that a passed config is an object with all required keys present. - * - * Note: To make sure async local storage loading works correctly, this - * should not be called with a default or prepopulated config argument. */ export function ensureConfig( config?: CallOptions ): CallOptions { + const inheritedConfig = + AsyncLocalStorageProviderSingleton.getRunnableConfig(); + const loadedConfig = - config ?? AsyncLocalStorageProviderSingleton.getRunnableConfig(); + inheritedConfig && config + ? { ...inheritedConfig, ...config } + : config ?? 
inheritedConfig; + let empty: RunnableConfig = { tags: [], metadata: {}, From 038ad0e6b90d0f9c4b1f2db2f1baa57fbc99f266 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Mon, 15 Jul 2024 20:41:22 -0700 Subject: [PATCH 11/31] Check if we should include the tracer depending on the context --- langchain-core/src/callbacks/manager.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 65f5c2636ac2..6d72a7685203 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -1,4 +1,5 @@ import { v4 as uuidv4 } from "uuid"; +import { AsyncLocalStorageProviderSingleton } from "langsmith/singletons/traceable"; import { AgentAction, AgentFinish } from "../agents.js"; import type { ChainValues } from "../utils/types/index.js"; import { LLMResult } from "../outputs.js"; @@ -1009,7 +1010,9 @@ export class CallbackManager const verboseEnabled = getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" || options?.verbose; - const tracingV2Enabled = isTracingEnabled(); + const tracingV2Enabled = + AsyncLocalStorageProviderSingleton.getInstance().getStore() + ?.tracingEnabled || isTracingEnabled(); const tracingEnabled = tracingV2Enabled || From 80ca236475c529f2c2222e938b49aefcc194d841 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Mon, 15 Jul 2024 21:05:18 -0700 Subject: [PATCH 12/31] Reconstruct the run tree from run map --- langchain-core/src/singletons/index.ts | 55 ++++++++----------- .../src/tracers/tracer_langchain.ts | 45 +++++++++++++++ 2 files changed, 67 insertions(+), 33 deletions(-) diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index a1f56323d924..d8db40bfbc41 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -1,7 +1,6 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ -import { RunTree } from "langsmith"; -import { isTracingEnabled } from "../utils/callbacks.js"; import { CallbackManager } from "../callbacks/manager.js"; +import { LangChainTracer } from "../tracers/tracer_langchain.js"; export interface AsyncLocalStorageInterface { getStore: () => any | undefined; @@ -31,7 +30,10 @@ class AsyncLocalStorageProvider { getRunnableConfig() { const storage = this.getInstance(); - return storage.getStore()?.extra?._lc_runnable_config; + // this has the runnable config + // which means that I should also have an instance of a LangChainTracer + // with the run map prepopulated + return storage.getStore()?.extra?.[Symbol.for("lc:child_config")]; } runWithConfig(config: any, callback: () => T): T { @@ -42,39 +44,26 @@ class AsyncLocalStorageProvider { undefined, config?.metadata ); - const parentRunId = callbackManager?.getParentRunId(); - const handlers = callbackManager?.handlers; const storage = this.getInstance(); - const currentRunTree = storage.getStore(); - let newRunTree; - if (currentRunTree !== undefined && currentRunTree.id === config?.run_id) { - newRunTree = currentRunTree; - newRunTree.extra = { - ...newRunTree.extra, - _lc_runnable_config: config, + const parentRunId = callbackManager?.getParentRunId(); + + const langChainTracer = callbackManager?.handlers?.find( + (handler) => handler?.name === "langchain_tracer" + ) as LangChainTracer | undefined; + + const runTree = + langChainTracer && parentRunId + ? 
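
The merge rule introduced in ensureConfig above boils down to a few lines: an explicitly passed config now wins key-by-key over the config inherited from async local storage, instead of the inherited config being ignored outright. A condensed sketch of just that behavior; the import paths and the helper name are illustrative rather than part of the change:

```typescript
import { AsyncLocalStorageProviderSingleton } from "@langchain/core/singletons";
import type { RunnableConfig } from "@langchain/core/runnables";

// Explicit config fields override inherited ones; either side alone is used as-is.
function resolveConfig(config?: RunnableConfig): RunnableConfig | undefined {
  const inherited = AsyncLocalStorageProviderSingleton.getRunnableConfig();
  if (inherited && config) {
    return { ...inherited, ...config };
  }
  return config ?? inherited;
}
```
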
langChainTracer.convertToRunTree(parentRunId) + : undefined; + + if (runTree) { + runTree.extra = { + ...runTree.extra, + [Symbol.for("lc:child_config")]: config, }; - } else { - const langChainTracer: any = handlers?.find( - (handler) => handler?.name === "langchain_tracer" - ); - const tracingEnabled = isTracingEnabled() || !!langChainTracer; - const parentRun = langChainTracer?.getRun?.(parentRunId); - const projectName = langChainTracer?.projectName; - const client = langChainTracer?.client; - newRunTree = new RunTree({ - client, - tracingEnabled, - id: config?.run_id, - parent_run_id: parentRun?.id, - project_name: projectName, - name: config?.runName ?? "", - extra: { - metadata: { ...config?.metadata }, - _lc_runnable_config: config, - }, - }); } - return storage.run(newRunTree, callback); + + return storage.run(runTree, callback); } initializeGlobalInstance(instance: AsyncLocalStorageInterface) { diff --git a/langchain-core/src/tracers/tracer_langchain.ts b/langchain-core/src/tracers/tracer_langchain.ts index a85ddf29a523..af748828930c 100644 --- a/langchain-core/src/tracers/tracer_langchain.ts +++ b/langchain-core/src/tracers/tracer_langchain.ts @@ -147,4 +147,49 @@ export class LangChainTracer return undefined; } } + + convertToRunTree(id: string): RunTree | undefined { + // create a run tree from a run map + const runTreeMap: Record = {}; + const runTreeList: [id: string, dotted_order: string | undefined][] = []; + for (const [id, run] of this.runMap) { + // TODO: this loses object reference equality + // wrap it in a proxy to copy properties back to the original run map + const runTree = new RunTree({ + ...run, + child_runs: [], + parent_run: undefined, + + // inherited properties + client: this.client, + project_name: this.projectName, + reference_example_id: this.exampleId, + tracingEnabled: true, + }); + + runTreeMap[id] = runTree; + runTreeList.push([id, run.dotted_order]); + } + + runTreeList.sort((a, b) => { + if (!a[1] || !b[1]) return 0; + return a[1].localeCompare(b[1]); + }); + + for (const [id] of runTreeList) { + const run = this.runMap.get(id); + const runTree = runTreeMap[id]; + if (!run || !runTree) continue; + + if (run.parent_run_id) { + const parentRunTree = runTreeMap[run.parent_run_id]; + if (parentRunTree) { + parentRunTree.child_runs.push(runTree); + runTree.parent_run = parentRunTree; + } + } + } + + return runTreeMap[id]; + } } From 27d93a05299f3046306188d6f82a631fedd24f9f Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 11:27:51 -0700 Subject: [PATCH 13/31] Update note --- langchain-core/src/tracers/tracer_langchain.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/langchain-core/src/tracers/tracer_langchain.ts b/langchain-core/src/tracers/tracer_langchain.ts index af748828930c..3fe3bbed3488 100644 --- a/langchain-core/src/tracers/tracer_langchain.ts +++ b/langchain-core/src/tracers/tracer_langchain.ts @@ -153,8 +153,10 @@ export class LangChainTracer const runTreeMap: Record = {}; const runTreeList: [id: string, dotted_order: string | undefined][] = []; for (const [id, run] of this.runMap) { - // TODO: this loses object reference equality - // wrap it in a proxy to copy properties back to the original run map + // by converting the run map to a run tree, we are doing a copy + // thus, any mutation performed on the run tree will not be reflected + // back in the run map + // TODO: Stop using `this.runMap` in favour of LangSmith's `RunTree` const runTree = new RunTree({ ...run, child_runs: [], From 
83c81e84ccf870b2299453d600ff9999ed2a50e0 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 11:30:27 -0700 Subject: [PATCH 14/31] Use a symbol --- langchain-core/src/singletons/index.ts | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index d8db40bfbc41..cf6141d28bfd 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -20,12 +20,11 @@ export class MockAsyncLocalStorage implements AsyncLocalStorageInterface { const mockAsyncLocalStorage = new MockAsyncLocalStorage(); +const TRACING_ALS_KEY = Symbol.for("ls:tracing_async_local_storage"); + class AsyncLocalStorageProvider { getInstance(): AsyncLocalStorageInterface { - return ( - (globalThis as any).__lc_tracing_async_local_storage_v2 ?? - mockAsyncLocalStorage - ); + return (globalThis as any)[TRACING_ALS_KEY] ?? mockAsyncLocalStorage; } getRunnableConfig() { @@ -67,8 +66,8 @@ class AsyncLocalStorageProvider { } initializeGlobalInstance(instance: AsyncLocalStorageInterface) { - if ((globalThis as any).__lc_tracing_async_local_storage_v2 === undefined) { - (globalThis as any).__lc_tracing_async_local_storage_v2 = instance; + if ((globalThis as any)[TRACING_ALS_KEY] === undefined) { + (globalThis as any)[TRACING_ALS_KEY] = instance; } } } From c4f73fa9368f754c599077269de7f0ab8f3e8b88 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 11:33:36 -0700 Subject: [PATCH 15/31] Fix ALS test --- langchain-core/src/singletons/tests/async_local_storage.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/langchain-core/src/singletons/tests/async_local_storage.test.ts b/langchain-core/src/singletons/tests/async_local_storage.test.ts index b08ad1fdcff5..4cb5a2ea77f9 100644 --- a/langchain-core/src/singletons/tests/async_local_storage.test.ts +++ b/langchain-core/src/singletons/tests/async_local_storage.test.ts @@ -166,7 +166,7 @@ test("Runnable streamEvents method with streaming nested in a RunnableLambda", a const nestedLambdaWithoutOverriddenCallbacks = RunnableLambda.from( async (_: string, config) => { const innerCallbackManager = await getCallbackManagerForConfig( - asyncLocalStorage.getStore() + AsyncLocalStorageProviderSingleton.getRunnableConfig() ); expect(innerCallbackManager?.getParentRunId()).toEqual(innerRunId2); expect(config?.callbacks?.handlers).toContain(dummyHandler); From 02109759dd4bb0a98a05208d1d56f0b4301993d8 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 11:46:44 -0700 Subject: [PATCH 16/31] Use the ALS singleton from LC --- langchain-core/src/callbacks/manager.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 14b6359bac66..1e27d8605329 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -1,5 +1,5 @@ import { v4 as uuidv4 } from "uuid"; -import { AsyncLocalStorageProviderSingleton } from "langsmith/singletons/traceable"; +import type { RunTree } from "langsmith"; import { AgentAction, AgentFinish } from "../agents.js"; import type { ChainValues } from "../utils/types/index.js"; import { LLMResult } from "../outputs.js"; @@ -21,6 +21,7 @@ import { consumeCallback } from "./promises.js"; import { Serialized } from "../load/serializable.js"; import type { DocumentInterface } from "../documents/document.js"; import { isTracingEnabled } from 
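
Switching the global key to Symbol.for has one practical consequence worth noting: the process-wide symbol registry returns the same symbol for the same string in every copy of the module, so duplicated installs of @langchain/core still share a single AsyncLocalStorage instance. A minimal sketch of that mechanism, with an illustrative function signature:

```typescript
const TRACING_ALS_KEY = Symbol.for("ls:tracing_async_local_storage");

// Symbol.for() consults the global symbol registry, so every copy of this
// module asking for the same key receives the identical symbol back.
function initializeGlobalInstance<T>(instance: T): void {
  const g = globalThis as Record<PropertyKey, unknown>;
  // Only the first copy to run stores its instance; later copies reuse it.
  if (g[TRACING_ALS_KEY] === undefined) {
    g[TRACING_ALS_KEY] = instance;
  }
}
```
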
"../utils/callbacks.js"; +import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; if ( /* #__PURE__ */ isTracingEnabled() && @@ -1047,9 +1048,10 @@ export class CallbackManager const verboseEnabled = getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" || options?.verbose; - const tracingV2Enabled = - AsyncLocalStorageProviderSingleton.getInstance().getStore() - ?.tracingEnabled || isTracingEnabled(); + + const contextRun: RunTree | undefined = + AsyncLocalStorageProviderSingleton.getInstance().getStore(); + const tracingV2Enabled = contextRun?.tracingEnabled || isTracingEnabled(); const tracingEnabled = tracingV2Enabled || From 1746136088e8f772fecfb6f12c43e09bb1811b38 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 12:11:24 -0700 Subject: [PATCH 17/31] Remove circular dep --- langchain-core/src/callbacks/manager.ts | 11 +++++------ langchain-core/src/tracers/tracer_langchain.ts | 18 +++++++++--------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 1e27d8605329..05033c8d568f 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -1,5 +1,4 @@ import { v4 as uuidv4 } from "uuid"; -import type { RunTree } from "langsmith"; import { AgentAction, AgentFinish } from "../agents.js"; import type { ChainValues } from "../utils/types/index.js"; import { LLMResult } from "../outputs.js"; @@ -21,7 +20,6 @@ import { consumeCallback } from "./promises.js"; import { Serialized } from "../load/serializable.js"; import type { DocumentInterface } from "../documents/document.js"; import { isTracingEnabled } from "../utils/callbacks.js"; -import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; if ( /* #__PURE__ */ isTracingEnabled() && @@ -1049,9 +1047,9 @@ export class CallbackManager getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" || options?.verbose; - const contextRun: RunTree | undefined = - AsyncLocalStorageProviderSingleton.getInstance().getStore(); - const tracingV2Enabled = contextRun?.tracingEnabled || isTracingEnabled(); + const tracingV2Enabled = + LangChainTracer.getTraceableRunTree()?.tracingEnabled || + isTracingEnabled(); const tracingEnabled = tracingV2Enabled || @@ -1082,7 +1080,8 @@ export class CallbackManager // handoff between langchain and langsmith/traceable // override the parent run ID callbackManager._parentRunId = - tracerV2.getTraceableRunTree()?.id ?? callbackManager._parentRunId; + LangChainTracer.getTraceableRunTree()?.id ?? 
+ callbackManager._parentRunId; } } } diff --git a/langchain-core/src/tracers/tracer_langchain.ts b/langchain-core/src/tracers/tracer_langchain.ts index 3fe3bbed3488..17a9677a3792 100644 --- a/langchain-core/src/tracers/tracer_langchain.ts +++ b/langchain-core/src/tracers/tracer_langchain.ts @@ -64,7 +64,7 @@ export class LangChainTracer // if we're inside traceable, we can obtain the traceable tree // and populate the run map, which is used to correctly // infer dotted order and execution order - const traceableTree = this.getTraceableRunTree(); + const traceableTree = LangChainTracer.getTraceableRunTree(); if (traceableTree) { let rootRun: RunTree = traceableTree; const visited = new Set(); @@ -140,14 +140,6 @@ export class LangChainTracer return this.runMap.get(id); } - getTraceableRunTree(): RunTree | undefined { - try { - return getCurrentRunTree(); - } catch { - return undefined; - } - } - convertToRunTree(id: string): RunTree | undefined { // create a run tree from a run map const runTreeMap: Record = {}; @@ -194,4 +186,12 @@ export class LangChainTracer return runTreeMap[id]; } + + static getTraceableRunTree(): RunTree | undefined { + try { + return getCurrentRunTree(); + } catch { + return undefined; + } + } } From d34877979369f3c7c2d3638789c002d35637b546 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 14:24:41 -0700 Subject: [PATCH 18/31] Expose method for updating tracer from run tree --- .../src/tracers/tracer_langchain.ts | 62 +++++++++---------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/langchain-core/src/tracers/tracer_langchain.ts b/langchain-core/src/tracers/tracer_langchain.ts index 17a9677a3792..3e4fa6a54bb7 100644 --- a/langchain-core/src/tracers/tracer_langchain.ts +++ b/langchain-core/src/tracers/tracer_langchain.ts @@ -61,38 +61,9 @@ export class LangChainTracer this.exampleId = exampleId; this.client = client ?? new Client({}); - // if we're inside traceable, we can obtain the traceable tree - // and populate the run map, which is used to correctly - // infer dotted order and execution order const traceableTree = LangChainTracer.getTraceableRunTree(); if (traceableTree) { - let rootRun: RunTree = traceableTree; - const visited = new Set(); - while (rootRun.parent_run) { - if (visited.has(rootRun.id)) break; - visited.add(rootRun.id); - - if (!rootRun.parent_run) break; - rootRun = rootRun.parent_run as RunTree; - } - visited.clear(); - - const queue = [rootRun]; - while (queue.length > 0) { - const current = queue.shift(); - if (!current || visited.has(current.id)) continue; - visited.add(current.id); - - // @ts-expect-error Types of property 'events' are incompatible. - this.runMap.set(current.id, current); - if (current.child_runs) { - queue.push(...current.child_runs); - } - } - - this.client = traceableTree.client ?? this.client; - this.projectName = traceableTree.project_name ?? this.projectName; - this.exampleId = traceableTree.reference_example_id ?? 
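
The static helper relied on above is tiny, but the try/catch is load-bearing: getCurrentRunTree() throws when the caller is not running inside a LangSmith traceable(), and that failure is what signals the absence of an ambient run tree. A sketch, assuming getCurrentRunTree is imported from langsmith's traceable singletons entrypoint:

```typescript
import { getCurrentRunTree } from "langsmith/singletons/traceable";
import type { RunTree } from "langsmith";

// Returns the ambient LangSmith run tree when inside traceable(), otherwise
// undefined; getCurrentRunTree() throws in the latter case.
function getAmbientRunTree(): RunTree | undefined {
  try {
    return getCurrentRunTree();
  } catch {
    return undefined;
  }
}
```
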
this.exampleId; + this.updateFromRunTree(traceableTree); } } @@ -140,8 +111,37 @@ export class LangChainTracer return this.runMap.get(id); } + updateFromRunTree(runTree: RunTree) { + let rootRun: RunTree = runTree; + const visited = new Set(); + while (rootRun.parent_run) { + if (visited.has(rootRun.id)) break; + visited.add(rootRun.id); + + if (!rootRun.parent_run) break; + rootRun = rootRun.parent_run as RunTree; + } + visited.clear(); + + const queue = [rootRun]; + while (queue.length > 0) { + const current = queue.shift(); + if (!current || visited.has(current.id)) continue; + visited.add(current.id); + + // @ts-expect-error Types of property 'events' are incompatible. + this.runMap.set(current.id, current); + if (current.child_runs) { + queue.push(...current.child_runs); + } + } + + this.client = runTree.client ?? this.client; + this.projectName = runTree.project_name ?? this.projectName; + this.exampleId = runTree.reference_example_id ?? this.exampleId; + } + convertToRunTree(id: string): RunTree | undefined { - // create a run tree from a run map const runTreeMap: Record = {}; const runTreeList: [id: string, dotted_order: string | undefined][] = []; for (const [id, run] of this.runMap) { From 03efaa013b6ac027f2ec9df2245c46d85c780bbb Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 14:56:43 -0700 Subject: [PATCH 19/31] Send a dummy run tree in case there's no tracer present --- langchain-core/src/singletons/index.ts | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index cf6141d28bfd..f1d6801431a0 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -1,4 +1,5 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ +import { RunTree } from "langsmith"; import { CallbackManager } from "../callbacks/manager.js"; import { LangChainTracer } from "../tracers/tracer_langchain.js"; @@ -21,6 +22,7 @@ export class MockAsyncLocalStorage implements AsyncLocalStorageInterface { const mockAsyncLocalStorage = new MockAsyncLocalStorage(); const TRACING_ALS_KEY = Symbol.for("ls:tracing_async_local_storage"); +const LC_CHILD_KEY = Symbol.for("lc:child_config"); class AsyncLocalStorageProvider { getInstance(): AsyncLocalStorageInterface { @@ -32,7 +34,7 @@ class AsyncLocalStorageProvider { // this has the runnable config // which means that I should also have an instance of a LangChainTracer // with the run map prepopulated - return storage.getStore()?.extra?.[Symbol.for("lc:child_config")]; + return storage.getStore()?.extra?.[LC_CHILD_KEY]; } runWithConfig(config: any, callback: () => T): T { @@ -53,13 +55,10 @@ class AsyncLocalStorageProvider { const runTree = langChainTracer && parentRunId ? 
langChainTracer.convertToRunTree(parentRunId) - : undefined; + : new RunTree({ name: "", tracingEnabled: false }); if (runTree) { - runTree.extra = { - ...runTree.extra, - [Symbol.for("lc:child_config")]: config, - }; + runTree.extra = { ...runTree.extra, [LC_CHILD_KEY]: config }; } return storage.run(runTree, callback); From 347dcb3f5a8738fc1dd5704a92564c942e3fb516 Mon Sep 17 00:00:00 2001 From: Tat Dat Duong Date: Tue, 16 Jul 2024 15:05:47 -0700 Subject: [PATCH 20/31] Prevent inheriting runName --- langchain-core/src/runnables/config.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/langchain-core/src/runnables/config.ts b/langchain-core/src/runnables/config.ts index 98ac17108d9a..409d556eac8d 100644 --- a/langchain-core/src/runnables/config.ts +++ b/langchain-core/src/runnables/config.ts @@ -113,10 +113,10 @@ export function ensureConfig( runId: undefined, }; if (implicitConfig) { - // Don't allow runId to be loaded implicitly, as this can cause + // Don't allow runId and runName to be loaded implicitly, as this can cause // child runs to improperly inherit their parents' run ids. // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { runId, ...rest } = implicitConfig; + const { runId, runName, ...rest } = implicitConfig; empty = Object.entries(rest).reduce( // eslint-disable-next-line @typescript-eslint/no-explicit-any (currentConfig: Record, [key, value]) => { From b5fa41fb89fce152795fbaa7461c5eb31370a06b Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Thu, 18 Jul 2024 14:13:39 -0700 Subject: [PATCH 21/31] Make sync operations in base tracer execute first --- .../src/callbacks/tests/callbacks.test.ts | 4 + .../src/runnables/tests/tracing.int.test.ts | 49 ++++++ langchain-core/src/tracers/base.ts | 154 +++++++++--------- package.json | 1 + yarn.lock | 8 +- 5 files changed, 136 insertions(+), 80 deletions(-) create mode 100644 langchain-core/src/runnables/tests/tracing.int.test.ts diff --git a/langchain-core/src/callbacks/tests/callbacks.test.ts b/langchain-core/src/callbacks/tests/callbacks.test.ts index 4f24161f9fcc..58644caa40e1 100644 --- a/langchain-core/src/callbacks/tests/callbacks.test.ts +++ b/langchain-core/src/callbacks/tests/callbacks.test.ts @@ -1,3 +1,4 @@ +/* eslint-disable no-promise-executor-return */ import { test, expect } from "@jest/globals"; import * as uuid from "uuid"; import { CallbackManager } from "../manager.js"; @@ -202,6 +203,9 @@ test("CallbackManager", async () => { ]); await retrieverCb.handleRetrieverError(new Error("test")); + // In case background mode is on while running this test + await new Promise((resolve) => setTimeout(resolve, 100)); + for (const handler of [handler1, handler2]) { expect(handler.starts).toBe(5); expect(handler.ends).toBe(5); diff --git a/langchain-core/src/runnables/tests/tracing.int.test.ts b/langchain-core/src/runnables/tests/tracing.int.test.ts new file mode 100644 index 000000000000..c64c6069380d --- /dev/null +++ b/langchain-core/src/runnables/tests/tracing.int.test.ts @@ -0,0 +1,49 @@ +// import { traceable } from "langsmith/traceable"; + +// import { RunnableLambda, RunnableMap } from "../../runnables/base.js"; +// import { AIMessage, BaseMessage, HumanMessage } from "../../messages/index.js"; +// import { LangChainTracer } from "../../tracers/tracer_langchain.js"; +// // import { Client } from "langsmith"; +// // const client = new Client({ +// // apiKey: "ls__5ee8c61b353f4ba6912ffefe40088608", +// // apiUrl: "http://127.0.0.1:1984", +// // }); + +// test("x", async () => { +// 
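
The fallback added in the singletons module above is worth restating on its own: when no langchain_tracer handler is present, a placeholder RunTree with tracing disabled is stored anyway, purely so the child config keeps flowing through async local storage. A sketch of that shape, where the function and parameter names are stand-ins rather than the real API:

```typescript
import { RunTree } from "langsmith";
import type { RunnableConfig } from "@langchain/core/runnables";

const LC_CHILD_KEY = Symbol.for("lc:child_config");

// With no tracer attached, a dummy run tree still carries the child config so
// nested runnables can inherit callbacks, tags, and metadata.
function buildStoreValue(config: RunnableConfig, fromTracer?: RunTree): RunTree {
  const runTree =
    fromTracer ?? new RunTree({ name: "", tracingEnabled: false });
  runTree.extra = { ...runTree.extra, [LC_CHILD_KEY]: config };
  return runTree;
}
```
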
const tracer = new LangChainTracer({}); + +// const concatMsg = traceable( +// async (msg: string, name = "world") => { +// const nested = RunnableLambda.from(async () => { +// return "nested"; +// }); +// await nested.invoke({}); +// return `${msg} ${name}`; +// }, +// { +// name: "concat_msg", +// } +// ); + +// const aiGreet = traceable( +// async (msg: BaseMessage, name = "world") => +// new AIMessage({ content: await concatMsg(msg.content as string, name) }), +// { name: "aiGreet" } +// ); + +// const root = RunnableLambda.from(async (messages: BaseMessage[]) => { +// const lastMsg = messages.at(-1) as HumanMessage; +// const greetOne = await aiGreet(lastMsg, "David"); +// const greetTwo = await aiGreet(lastMsg, "Pavel"); + +// return [greetOne, greetTwo]; +// }); + +// const map = RunnableMap.from({ messages: root }); + +// console.log( +// await map.invoke([new HumanMessage({ content: "Hello!" })], { +// callbacks: [tracer], +// }) +// ); +// }); diff --git a/langchain-core/src/tracers/base.ts b/langchain-core/src/tracers/base.ts index 69cc098cdc55..f44222b6aacb 100644 --- a/langchain-core/src/tracers/base.ts +++ b/langchain-core/src/tracers/base.ts @@ -90,7 +90,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { parentRun.child_runs.push(childRun); } - protected async _startTrace(run: Run) { + protected _startTrace(run: Run): Promise { const currentDottedOrder = convertToDottedOrderFormat( run.start_time, run.id, @@ -126,7 +126,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { storedRun.dotted_order = currentDottedOrder; } this.runMap.set(storedRun.id, storedRun); - await this.onRunCreate?.(storedRun); + return this.onRunCreate?.(storedRun) ?? Promise.resolve(); } protected async _endTrace(run: Run): Promise { @@ -154,7 +154,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { return parentRun.child_execution_order + 1; } - async handleLLMStart( + handleLLMStart( llm: Serialized, prompts: string[], runId: string, @@ -190,12 +190,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - await this._startTrace(run); - await this.onLLMStart?.(run); - return run; + return this._startTrace(run) + .then(() => this.onLLMStart?.(run)) + .then(() => run); } - async handleChatModelStart( + handleChatModelStart( llm: Serialized, messages: BaseMessage[][], runId: string, @@ -231,15 +231,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - await this._startTrace(run); - await this.onLLMStart?.(run); - return run; + return this._startTrace(run) + .then(() => this.onLLMStart?.(run)) + .then(() => run); } - async handleLLMEnd(output: LLMResult, runId: string): Promise { + handleLLMEnd(output: LLMResult, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") { - throw new Error("No LLM run to end."); + return Promise.reject(new Error("No LLM run to end.")); } run.end_time = Date.now(); run.outputs = output; @@ -247,15 +247,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "end", time: new Date(run.end_time).toISOString(), }); - await this.onLLMEnd?.(run); - await this._endTrace(run); - return run; + return (this.onLLMEnd?.(run) ?? 
Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleLLMError(error: unknown, runId: string): Promise { + handleLLMError(error: unknown, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") { - throw new Error("No LLM run to end."); + return Promise.reject(new Error("No LLM run to end.")); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -263,12 +263,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "error", time: new Date(run.end_time).toISOString(), }); - await this.onLLMError?.(run); - await this._endTrace(run); - return run; + return (this.onLLMError?.(run) ?? Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleChainStart( + handleChainStart( chain: Serialized, inputs: ChainValues, runId: string, @@ -300,12 +300,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { extra: metadata ? { metadata } : {}, tags: tags || [], }; - await this._startTrace(run); - await this.onChainStart?.(run); - return run; + return this._startTrace(run) + .then(() => this.onChainStart?.(run)) + .then(() => run); } - async handleChainEnd( + handleChainEnd( outputs: ChainValues, runId: string, _parentRunId?: string, @@ -314,7 +314,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { ): Promise { const run = this.runMap.get(runId); if (!run) { - throw new Error("No chain run to end."); + return Promise.reject(new Error("No chain run to end.")); } run.end_time = Date.now(); run.outputs = _coerceToDict(outputs, "output"); @@ -325,12 +325,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { if (kwargs?.inputs !== undefined) { run.inputs = _coerceToDict(kwargs.inputs, "input"); } - await this.onChainEnd?.(run); - await this._endTrace(run); - return run; + return (this.onChainEnd?.(run) ?? Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleChainError( + handleChainError( error: unknown, runId: string, _parentRunId?: string, @@ -339,7 +339,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { ): Promise { const run = this.runMap.get(runId); if (!run) { - throw new Error("No chain run to end."); + return Promise.reject(new Error("No chain run to end.")); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -350,12 +350,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { if (kwargs?.inputs !== undefined) { run.inputs = _coerceToDict(kwargs.inputs, "input"); } - await this.onChainError?.(run); - await this._endTrace(run); - return run; + return (this.onChainError?.(run) ?? 
Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleToolStart( + handleToolStart( tool: Serialized, input: string, runId: string, @@ -387,16 +387,16 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - await this._startTrace(run); - await this.onToolStart?.(run); - return run; + return this._startTrace(run) + .then(() => this.onToolStart?.(run)) + .then(() => run); } // eslint-disable-next-line @typescript-eslint/no-explicit-any - async handleToolEnd(output: any, runId: string): Promise { + handleToolEnd(output: any, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "tool") { - throw new Error("No tool run to end"); + return Promise.reject(new Error("No tool run to end")); } run.end_time = Date.now(); run.outputs = { output }; @@ -404,15 +404,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "end", time: new Date(run.end_time).toISOString(), }); - await this.onToolEnd?.(run); - await this._endTrace(run); - return run; + return (this.onToolEnd?.(run) ?? Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleToolError(error: unknown, runId: string): Promise { + handleToolError(error: unknown, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "tool") { - throw new Error("No tool run to end"); + return Promise.reject(new Error("No tool run to end")); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -420,15 +420,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "error", time: new Date(run.end_time).toISOString(), }); - await this.onToolError?.(run); - await this._endTrace(run); - return run; + return (this.onToolError?.(run) ?? Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleAgentAction(action: AgentAction, runId: string): Promise { + handleAgentAction(action: AgentAction, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") { - return; + return Promise.resolve(); } const agentRun = run as AgentRun; agentRun.actions = agentRun.actions || []; @@ -438,23 +438,23 @@ export abstract class BaseTracer extends BaseCallbackHandler { time: new Date().toISOString(), kwargs: { action }, }); - await this.onAgentAction?.(run as AgentRun); + return this.onAgentAction?.(run as AgentRun) ?? Promise.resolve(); } - async handleAgentEnd(action: AgentFinish, runId: string): Promise { + handleAgentEnd(action: AgentFinish, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") { - return; + return Promise.resolve(); } run.events.push({ name: "agent_end", time: new Date().toISOString(), kwargs: { action }, }); - await this.onAgentEnd?.(run); + return this.onAgentEnd?.(run) ?? 
Promise.resolve(); } - async handleRetrieverStart( + handleRetrieverStart( retriever: Serialized, query: string, runId: string, @@ -486,18 +486,18 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - await this._startTrace(run); - await this.onRetrieverStart?.(run); - return run; + return this._startTrace(run) + .then(() => this.onRetrieverStart?.(run)) + .then(() => run); } - async handleRetrieverEnd( + handleRetrieverEnd( documents: Document>[], runId: string ): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "retriever") { - throw new Error("No retriever run to end"); + return Promise.reject(new Error("No retriever run to end")); } run.end_time = Date.now(); run.outputs = { documents }; @@ -505,15 +505,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "end", time: new Date(run.end_time).toISOString(), }); - await this.onRetrieverEnd?.(run); - await this._endTrace(run); - return run; + return (this.onRetrieverEnd?.(run) ?? Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleRetrieverError(error: unknown, runId: string): Promise { + handleRetrieverError(error: unknown, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "retriever") { - throw new Error("No retriever run to end"); + return Promise.reject(new Error("No retriever run to end")); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -521,25 +521,25 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "error", time: new Date(run.end_time).toISOString(), }); - await this.onRetrieverError?.(run); - await this._endTrace(run); - return run; + return (this.onRetrieverError?.(run) ?? Promise.resolve()) + .then(() => this._endTrace(run)) + .then(() => run); } - async handleText(text: string, runId: string): Promise { + handleText(text: string, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") { - return; + return Promise.resolve(); } run.events.push({ name: "text", time: new Date().toISOString(), kwargs: { text }, }); - await this.onText?.(run); + return this.onText?.(run) ?? Promise.resolve(); } - async handleLLMNewToken( + handleLLMNewToken( token: string, idx: NewTokenIndices, runId: string, @@ -549,8 +549,8 @@ export abstract class BaseTracer extends BaseCallbackHandler { ): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") { - throw new Error( - `Invalid "runId" provided to "handleLLMNewToken" callback.` + return Promise.reject( + new Error(`Invalid "runId" provided to "handleLLMNewToken" callback.`) ); } run.events.push({ @@ -558,8 +558,10 @@ export abstract class BaseTracer extends BaseCallbackHandler { time: new Date().toISOString(), kwargs: { token, idx, chunk: fields?.chunk }, }); - await this.onLLMNewToken?.(run, token, { chunk: fields?.chunk }); - return run; + return ( + this.onLLMNewToken?.(run, token, { chunk: fields?.chunk }) ?? 
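
Every handler in this commit follows the same mechanical rewrite: do the run-map bookkeeping synchronously, then return an explicit promise chain instead of awaiting, so the mutation is in place before control can yield to the event loop (which matters when callbacks run in background mode). A stripped-down sketch of the shape, with simplified types and names:

```typescript
type Run = { end_time?: number; outputs?: Record<string, unknown> };

// The synchronous bookkeeping happens before any microtask boundary; only the
// optional subclass hook and the final result run on the promise chain.
function endRun(
  runMap: Map<string, Run>,
  runId: string,
  onEnd?: (run: Run) => Promise<void>
): Promise<Run> {
  const run = runMap.get(runId);
  if (!run) {
    return Promise.reject(new Error("No run to end."));
  }
  run.end_time = Date.now(); // visible immediately, even in background mode
  return (onEnd?.(run) ?? Promise.resolve()).then(() => run);
}
```
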
+ Promise.resolve() + ).then(() => run); } // custom event handlers diff --git a/package.json b/package.json index 7c45191ba7be..a8619af3bd41 100644 --- a/package.json +++ b/package.json @@ -62,6 +62,7 @@ "voy-search@0.6.2": "patch:voy-search@npm%3A0.6.2#./.yarn/patches/voy-search-npm-0.6.2-d4aca30a0e.patch", "@langchain/core": "workspace:*", "better-sqlite3": "9.4.0", + "langsmith": "0.1.39-rc.0", "zod": "3.23.8" }, "lint-staged": { diff --git a/yarn.lock b/yarn.lock index f96f516917e9..cee179ffd5dd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -30545,9 +30545,9 @@ __metadata: languageName: unknown linkType: soft -"langsmith@npm:^0.1.30, langsmith@npm:~0.1.30, langsmith@npm:~0.1.7": - version: 0.1.30 - resolution: "langsmith@npm:0.1.30" +"langsmith@npm:0.1.39-rc.0": + version: 0.1.39-rc.0 + resolution: "langsmith@npm:0.1.39-rc.0" dependencies: "@types/uuid": ^9.0.1 commander: ^10.0.1 @@ -30565,7 +30565,7 @@ __metadata: optional: true openai: optional: true - checksum: 61f4f645b0d95bf0ddec4a275a2fd6859a650569c1ca0d092b318dcabb96fc72d9ae45f35c20d53c2ff6c2d615a2a99f27bb5a974c44ae57c4f359783c26ee99 + checksum: ab57ce54075fb80d55d230dd3b70f8824733f21b1c5ee52df1ded7671ab51fb406a4235aa0679131bfefe53e1da5a408665c95643ba719f87b0680ee4c4e00d5 languageName: node linkType: hard From 4cc9cfe168af4d15134e2c914e8cc36551a7dc7e Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Thu, 18 Jul 2024 15:51:24 -0700 Subject: [PATCH 22/31] Adds unit test --- .../src/runnables/tests/tracing.int.test.ts | 49 --- .../tracers/tests/langsmith_interop.test.ts | 312 ++++++++++++++++++ 2 files changed, 312 insertions(+), 49 deletions(-) delete mode 100644 langchain-core/src/runnables/tests/tracing.int.test.ts create mode 100644 langchain-core/src/tracers/tests/langsmith_interop.test.ts diff --git a/langchain-core/src/runnables/tests/tracing.int.test.ts b/langchain-core/src/runnables/tests/tracing.int.test.ts deleted file mode 100644 index c64c6069380d..000000000000 --- a/langchain-core/src/runnables/tests/tracing.int.test.ts +++ /dev/null @@ -1,49 +0,0 @@ -// import { traceable } from "langsmith/traceable"; - -// import { RunnableLambda, RunnableMap } from "../../runnables/base.js"; -// import { AIMessage, BaseMessage, HumanMessage } from "../../messages/index.js"; -// import { LangChainTracer } from "../../tracers/tracer_langchain.js"; -// // import { Client } from "langsmith"; -// // const client = new Client({ -// // apiKey: "ls__5ee8c61b353f4ba6912ffefe40088608", -// // apiUrl: "http://127.0.0.1:1984", -// // }); - -// test("x", async () => { -// const tracer = new LangChainTracer({}); - -// const concatMsg = traceable( -// async (msg: string, name = "world") => { -// const nested = RunnableLambda.from(async () => { -// return "nested"; -// }); -// await nested.invoke({}); -// return `${msg} ${name}`; -// }, -// { -// name: "concat_msg", -// } -// ); - -// const aiGreet = traceable( -// async (msg: BaseMessage, name = "world") => -// new AIMessage({ content: await concatMsg(msg.content as string, name) }), -// { name: "aiGreet" } -// ); - -// const root = RunnableLambda.from(async (messages: BaseMessage[]) => { -// const lastMsg = messages.at(-1) as HumanMessage; -// const greetOne = await aiGreet(lastMsg, "David"); -// const greetTwo = await aiGreet(lastMsg, "Pavel"); - -// return [greetOne, greetTwo]; -// }); - -// const map = RunnableMap.from({ messages: root }); - -// console.log( -// await map.invoke([new HumanMessage({ content: "Hello!" 
})], { -// callbacks: [tracer], -// }) -// ); -// }); diff --git a/langchain-core/src/tracers/tests/langsmith_interop.test.ts b/langchain-core/src/tracers/tests/langsmith_interop.test.ts new file mode 100644 index 000000000000..6f09e6e7e859 --- /dev/null +++ b/langchain-core/src/tracers/tests/langsmith_interop.test.ts @@ -0,0 +1,312 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +/* eslint-disable no-process-env */ + +import { jest } from "@jest/globals"; +import { Client } from "langsmith"; +import { traceable } from "langsmith/traceable"; + +import { RunnableLambda } from "../../runnables/base.js"; +import { BaseMessage, HumanMessage } from "../../messages/index.js"; +import { LangChainTracer } from "../tracer_langchain.js"; + +type ClientParams = Exclude[0], undefined>; + +const mockClient = (config?: Omit) => { + const client = new Client({ + ...config, + apiKey: "MOCK", + autoBatchTracing: false, + }); + const callSpy = jest + .spyOn((client as any).caller, "call") + .mockResolvedValue({ ok: true, text: () => "" }); + + const langChainTracer = new LangChainTracer({ + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore Overriden client + client, + }); + + return { client, callSpy, langChainTracer }; +}; + +test.each(["true", "false"])( + "traceables nested within runnables with background callbacks %s", + async (value) => { + process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; + const { callSpy, langChainTracer: tracer } = mockClient(); + + const aiGreet = traceable( + async (msg: BaseMessage, name = "world") => { + return msg.content + name; + }, + { name: "aiGreet" } + ); + + const root = RunnableLambda.from(async (messages: BaseMessage[]) => { + const lastMsg = messages.at(-1) as HumanMessage; + const greetOne = await aiGreet(lastMsg, "David"); + + return [greetOne]; + }); + + await root.invoke([new HumanMessage({ content: "Hello!" 
})], { + callbacks: [tracer], + }); + + expect(callSpy.mock.calls.length).toEqual(4); + const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); + const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); + const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); + const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + expect(firstCallParams).toEqual({ + session_name: expect.any(String), + id: firstCallParams.id, + name: "RunnableLambda", + start_time: expect.any(Number), + serialized: { + lc: 1, + type: "not_implemented", + id: ["langchain_core", "runnables", "RunnableLambda"], + }, + events: [{ name: "start", time: expect.any(String) }], + inputs: { + input: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + ], + }, + execution_order: 1, + child_execution_order: 1, + run_type: "chain", + extra: expect.any(Object), + tags: [], + trace_id: firstCallParams.id, + dotted_order: expect.any(String), + }); + expect(secondCallParams).toEqual({ + session_name: expect.any(String), + id: expect.any(String), + name: "aiGreet", + start_time: expect.any(Number), + run_type: "chain", + extra: expect.any(Object), + serialized: {}, + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "David", + ], + }, + child_runs: [], + parent_run_id: firstCallParams.id, + trace_id: firstCallParams.id, + dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), + tags: [], + }); + expect(thirdCallParams).toEqual({ + end_time: expect.any(Number), + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "David", + ], + }, + outputs: { outputs: "Hello!David" }, + parent_run_id: firstCallParams.id, + extra: expect.any(Object), + dotted_order: secondCallParams.dotted_order, + trace_id: firstCallParams.id, + tags: [], + }); + expect(fourthCallParams).toEqual({ + end_time: expect.any(Number), + outputs: { output: ["Hello!David"] }, + events: [ + { name: "start", time: expect.any(String) }, + { name: "end", time: expect.any(String) }, + ], + inputs: { + input: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + ], + }, + trace_id: firstCallParams.id, + dotted_order: firstCallParams.dotted_order, + }); + } +); + +test.each(["true", "false"])( + "runnables nested within traceables with background callbacks %s", + async (value) => { + process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; + const { client, callSpy } = mockClient(); + + const nested = RunnableLambda.from(async (messages: BaseMessage[]) => { + const lastMsg = messages.at(-1) as HumanMessage; + return [lastMsg.content]; + }); + + const aiGreet = traceable( + async (msg: BaseMessage, name = "world") => { + const contents = await nested.invoke([msg]); + return contents[0] + name; + }, + { name: "aiGreet", client, tracingEnabled: true } + ); + + await aiGreet(new HumanMessage({ content: "Hello!" 
}), "mitochondria"); + + expect(callSpy.mock.calls.length).toEqual(4); + const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); + const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); + const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); + const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + expect(firstCallParams).toEqual({ + session_name: expect.any(String), + id: firstCallParams.id, + name: "aiGreet", + start_time: expect.any(Number), + run_type: "chain", + extra: expect.any(Object), + serialized: {}, + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "mitochondria", + ], + }, + child_runs: [], + trace_id: firstCallParams.id, + dotted_order: firstCallParams.dotted_order, + tags: [], + }); + expect(secondCallParams).toEqual({ + session_name: expect.any(String), + id: secondCallParams.id, + name: "RunnableLambda", + parent_run_id: firstCallParams.id, + start_time: expect.any(Number), + serialized: { + lc: 1, + type: "not_implemented", + id: ["langchain_core", "runnables", "RunnableLambda"], + }, + events: [{ name: "start", time: expect.any(String) }], + inputs: { + input: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + ], + }, + execution_order: 2, + child_execution_order: 2, + run_type: "chain", + extra: expect.any(Object), + tags: [], + trace_id: firstCallParams.id, + dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), + }); + expect(thirdCallParams).toEqual({ + end_time: expect.any(Number), + outputs: { output: ["Hello!"] }, + events: [ + { name: "start", time: expect.any(String) }, + { name: "end", time: expect.any(String) }, + ], + inputs: { + input: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + ], + }, + trace_id: firstCallParams.id, + dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), + parent_run_id: firstCallParams.id, + }); + expect(fourthCallParams).toEqual({ + end_time: expect.any(Number), + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "mitochondria", + ], + }, + outputs: { outputs: "Hello!mitochondria" }, + extra: expect.any(Object), + dotted_order: firstCallParams.dotted_order, + trace_id: firstCallParams.id, + tags: [], + }); + } +); From 14d70b9e889e433447525d58430fbce80c7ddf45 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Thu, 18 Jul 2024 16:45:58 -0700 Subject: [PATCH 23/31] Streaming tests --- .../tracers/tests/langsmith_interop.test.ts | 303 ++++++++++++++++++ 1 file changed, 303 insertions(+) diff --git a/langchain-core/src/tracers/tests/langsmith_interop.test.ts b/langchain-core/src/tracers/tests/langsmith_interop.test.ts index 6f09e6e7e859..6ab27c0e8ec7 100644 --- a/langchain-core/src/tracers/tests/langsmith_interop.test.ts +++ b/langchain-core/src/tracers/tests/langsmith_interop.test.ts @@ -172,6 +172,147 @@ test.each(["true", "false"])( } ); +test.each(["true", "false"])( + "streaming traceables nested within runnables with background 
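
Between them, these two tests pin down the non-streaming interop contract in both directions. A distilled sketch of the usage pattern they exercise; the import paths and the commented-out invoke call are illustrative:

```typescript
import { traceable } from "langsmith/traceable";
import { RunnableLambda } from "@langchain/core/runnables";
import { LangChainTracer } from "@langchain/core/tracers/tracer_langchain";

// A traceable() awaited inside a RunnableLambda should be recorded as a child
// of the LangChain run instead of starting a separate trace, and vice versa.
const greet = traceable(async (name: string) => `Hello, ${name}!`, {
  name: "greet",
});

const chain = RunnableLambda.from(async (name: string) => greet(name));

// await chain.invoke("world", { callbacks: [new LangChainTracer()] });
```
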
callbacks %s", + async (value) => { + process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; + const { callSpy, langChainTracer: tracer } = mockClient(); + + const aiGreet = traceable( + async function* (msg: BaseMessage, name = "world") { + const res = msg.content + name; + for (const letter of res.split("")) { + yield letter; + } + }, + { name: "aiGreet" } + ); + + const root = RunnableLambda.from(async function* (messages: BaseMessage[]) { + const lastMsg = messages.at(-1) as HumanMessage; + yield* aiGreet(lastMsg, "David"); + }); + + const stream = await root.stream( + [new HumanMessage({ content: "Hello!" })], + { + callbacks: [tracer], + } + ); + + for await (const chunk of stream) { + console.log(chunk); + } + + expect(callSpy.mock.calls.length).toEqual(4); + const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); + const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); + const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); + const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + expect(firstCallParams).toEqual({ + session_name: expect.any(String), + id: firstCallParams.id, + name: "RunnableLambda", + start_time: expect.any(Number), + serialized: { + lc: 1, + type: "not_implemented", + id: ["langchain_core", "runnables", "RunnableLambda"], + }, + events: [{ name: "start", time: expect.any(String) }], + inputs: { + input: "", + }, + execution_order: 1, + child_execution_order: 1, + run_type: "chain", + extra: expect.any(Object), + tags: [], + trace_id: firstCallParams.id, + dotted_order: expect.any(String), + }); + expect(secondCallParams).toEqual({ + session_name: expect.any(String), + id: expect.any(String), + name: "aiGreet", + start_time: expect.any(Number), + run_type: "chain", + extra: expect.any(Object), + serialized: {}, + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "David", + ], + }, + child_runs: [], + parent_run_id: firstCallParams.id, + trace_id: firstCallParams.id, + dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), + tags: [], + }); + expect(thirdCallParams).toEqual({ + end_time: expect.any(Number), + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "David", + ], + }, + outputs: { + outputs: ["H", "e", "l", "l", "o", "!", "D", "a", "v", "i", "d"], + }, + parent_run_id: firstCallParams.id, + extra: expect.any(Object), + dotted_order: secondCallParams.dotted_order, + trace_id: firstCallParams.id, + tags: [], + }); + expect(fourthCallParams).toEqual({ + end_time: expect.any(Number), + outputs: { output: "Hello!David" }, + events: [ + { name: "start", time: expect.any(String) }, + { name: "end", time: expect.any(String) }, + ], + inputs: { + input: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + ], + }, + trace_id: firstCallParams.id, + dotted_order: firstCallParams.dotted_order, + }); + } +); + test.each(["true", "false"])( "runnables nested within traceables with background callbacks %s", async (value) => { @@ -310,3 +451,165 @@ test.each(["true", "false"])( }); } ); + +test.each(["true", "false"])( + "streaming 
runnables nested within traceables with background callbacks %s", + async (value) => { + process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; + const { client, callSpy } = mockClient(); + + const nested = RunnableLambda.from(async function* ( + messages: BaseMessage[] + ) { + const lastMsg = messages.at(-1) as HumanMessage; + for (const letter of (lastMsg.content as string).split("")) { + yield letter; + } + }); + + const aiGreet = traceable( + async function* (msg: BaseMessage, name = "world") { + for await (const chunk of await nested.stream([msg])) { + yield chunk; + } + for (const letter of name.split("")) { + yield letter; + } + }, + { name: "aiGreet", client, tracingEnabled: true } + ); + + for await (const chunk of aiGreet( + new HumanMessage({ content: "Hello!" }), + "mitochondria" + )) { + console.log(chunk); + } + + expect(callSpy.mock.calls.length).toEqual(4); + const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); + const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); + const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); + const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + expect(firstCallParams).toEqual({ + session_name: expect.any(String), + id: firstCallParams.id, + name: "aiGreet", + start_time: expect.any(Number), + run_type: "chain", + extra: expect.any(Object), + serialized: {}, + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "mitochondria", + ], + }, + child_runs: [], + trace_id: firstCallParams.id, + dotted_order: firstCallParams.dotted_order, + tags: [], + }); + expect(secondCallParams).toEqual({ + session_name: expect.any(String), + id: secondCallParams.id, + name: "RunnableLambda", + parent_run_id: firstCallParams.id, + start_time: expect.any(Number), + serialized: { + lc: 1, + type: "not_implemented", + id: ["langchain_core", "runnables", "RunnableLambda"], + }, + events: [{ name: "start", time: expect.any(String) }], + inputs: { + input: "", + }, + execution_order: 2, + child_execution_order: 2, + run_type: "chain", + extra: expect.any(Object), + tags: [], + trace_id: firstCallParams.id, + dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), + }); + expect(thirdCallParams).toEqual({ + end_time: expect.any(Number), + outputs: { output: "Hello!" 
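
The streaming variants check the same ownership rule when both sides are async generators. A minimal sketch of that shape, with illustrative names and the driving loop left as a comment:

```typescript
import { traceable } from "langsmith/traceable";
import { RunnableLambda } from "@langchain/core/runnables";

// An async-generator traceable() delegated to with `yield*` inside a streaming
// RunnableLambda should still be attributed to the LangChain parent run.
const spell = traceable(
  async function* (word: string) {
    for (const letter of word) {
      yield letter;
    }
  },
  { name: "spell" }
);

const chain = RunnableLambda.from(async function* (word: string) {
  yield* spell(word);
});

// for await (const letter of await chain.stream("hello")) { console.log(letter); }
```
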
}, + events: [ + { name: "start", time: expect.any(String) }, + { name: "end", time: expect.any(String) }, + ], + inputs: { + input: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + ], + }, + trace_id: firstCallParams.id, + dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), + parent_run_id: firstCallParams.id, + }); + expect(fourthCallParams).toEqual({ + end_time: expect.any(Number), + inputs: { + args: [ + { + lc: 1, + type: "constructor", + id: ["langchain_core", "messages", "HumanMessage"], + kwargs: { + content: "Hello!", + additional_kwargs: {}, + response_metadata: {}, + }, + }, + "mitochondria", + ], + }, + outputs: { + outputs: [ + "H", + "e", + "l", + "l", + "o", + "!", + "m", + "i", + "t", + "o", + "c", + "h", + "o", + "n", + "d", + "r", + "i", + "a", + ], + }, + extra: expect.any(Object), + dotted_order: firstCallParams.dotted_order, + trace_id: firstCallParams.id, + tags: [], + }); + } +); From bdb353141bc3c781fb4e2432d33a775a30dcd07d Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 19 Jul 2024 10:33:19 -0700 Subject: [PATCH 24/31] Revert base tracer --- langchain-core/src/tracers/base.ts | 154 ++++++++++++++--------------- 1 file changed, 76 insertions(+), 78 deletions(-) diff --git a/langchain-core/src/tracers/base.ts b/langchain-core/src/tracers/base.ts index f44222b6aacb..69cc098cdc55 100644 --- a/langchain-core/src/tracers/base.ts +++ b/langchain-core/src/tracers/base.ts @@ -90,7 +90,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { parentRun.child_runs.push(childRun); } - protected _startTrace(run: Run): Promise { + protected async _startTrace(run: Run) { const currentDottedOrder = convertToDottedOrderFormat( run.start_time, run.id, @@ -126,7 +126,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { storedRun.dotted_order = currentDottedOrder; } this.runMap.set(storedRun.id, storedRun); - return this.onRunCreate?.(storedRun) ?? 
Promise.resolve(); + await this.onRunCreate?.(storedRun); } protected async _endTrace(run: Run): Promise { @@ -154,7 +154,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { return parentRun.child_execution_order + 1; } - handleLLMStart( + async handleLLMStart( llm: Serialized, prompts: string[], runId: string, @@ -190,12 +190,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - return this._startTrace(run) - .then(() => this.onLLMStart?.(run)) - .then(() => run); + await this._startTrace(run); + await this.onLLMStart?.(run); + return run; } - handleChatModelStart( + async handleChatModelStart( llm: Serialized, messages: BaseMessage[][], runId: string, @@ -231,15 +231,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - return this._startTrace(run) - .then(() => this.onLLMStart?.(run)) - .then(() => run); + await this._startTrace(run); + await this.onLLMStart?.(run); + return run; } - handleLLMEnd(output: LLMResult, runId: string): Promise { + async handleLLMEnd(output: LLMResult, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") { - return Promise.reject(new Error("No LLM run to end.")); + throw new Error("No LLM run to end."); } run.end_time = Date.now(); run.outputs = output; @@ -247,15 +247,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "end", time: new Date(run.end_time).toISOString(), }); - return (this.onLLMEnd?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onLLMEnd?.(run); + await this._endTrace(run); + return run; } - handleLLMError(error: unknown, runId: string): Promise { + async handleLLMError(error: unknown, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") { - return Promise.reject(new Error("No LLM run to end.")); + throw new Error("No LLM run to end."); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -263,12 +263,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "error", time: new Date(run.end_time).toISOString(), }); - return (this.onLLMError?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onLLMError?.(run); + await this._endTrace(run); + return run; } - handleChainStart( + async handleChainStart( chain: Serialized, inputs: ChainValues, runId: string, @@ -300,12 +300,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { extra: metadata ? { metadata } : {}, tags: tags || [], }; - return this._startTrace(run) - .then(() => this.onChainStart?.(run)) - .then(() => run); + await this._startTrace(run); + await this.onChainStart?.(run); + return run; } - handleChainEnd( + async handleChainEnd( outputs: ChainValues, runId: string, _parentRunId?: string, @@ -314,7 +314,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { ): Promise { const run = this.runMap.get(runId); if (!run) { - return Promise.reject(new Error("No chain run to end.")); + throw new Error("No chain run to end."); } run.end_time = Date.now(); run.outputs = _coerceToDict(outputs, "output"); @@ -325,12 +325,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { if (kwargs?.inputs !== undefined) { run.inputs = _coerceToDict(kwargs.inputs, "input"); } - return (this.onChainEnd?.(run) ?? 
Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onChainEnd?.(run); + await this._endTrace(run); + return run; } - handleChainError( + async handleChainError( error: unknown, runId: string, _parentRunId?: string, @@ -339,7 +339,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { ): Promise { const run = this.runMap.get(runId); if (!run) { - return Promise.reject(new Error("No chain run to end.")); + throw new Error("No chain run to end."); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -350,12 +350,12 @@ export abstract class BaseTracer extends BaseCallbackHandler { if (kwargs?.inputs !== undefined) { run.inputs = _coerceToDict(kwargs.inputs, "input"); } - return (this.onChainError?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onChainError?.(run); + await this._endTrace(run); + return run; } - handleToolStart( + async handleToolStart( tool: Serialized, input: string, runId: string, @@ -387,16 +387,16 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - return this._startTrace(run) - .then(() => this.onToolStart?.(run)) - .then(() => run); + await this._startTrace(run); + await this.onToolStart?.(run); + return run; } // eslint-disable-next-line @typescript-eslint/no-explicit-any - handleToolEnd(output: any, runId: string): Promise { + async handleToolEnd(output: any, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "tool") { - return Promise.reject(new Error("No tool run to end")); + throw new Error("No tool run to end"); } run.end_time = Date.now(); run.outputs = { output }; @@ -404,15 +404,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "end", time: new Date(run.end_time).toISOString(), }); - return (this.onToolEnd?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onToolEnd?.(run); + await this._endTrace(run); + return run; } - handleToolError(error: unknown, runId: string): Promise { + async handleToolError(error: unknown, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "tool") { - return Promise.reject(new Error("No tool run to end")); + throw new Error("No tool run to end"); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -420,15 +420,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "error", time: new Date(run.end_time).toISOString(), }); - return (this.onToolError?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onToolError?.(run); + await this._endTrace(run); + return run; } - handleAgentAction(action: AgentAction, runId: string): Promise { + async handleAgentAction(action: AgentAction, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") { - return Promise.resolve(); + return; } const agentRun = run as AgentRun; agentRun.actions = agentRun.actions || []; @@ -438,23 +438,23 @@ export abstract class BaseTracer extends BaseCallbackHandler { time: new Date().toISOString(), kwargs: { action }, }); - return this.onAgentAction?.(run as AgentRun) ?? 
Promise.resolve(); + await this.onAgentAction?.(run as AgentRun); } - handleAgentEnd(action: AgentFinish, runId: string): Promise { + async handleAgentEnd(action: AgentFinish, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") { - return Promise.resolve(); + return; } run.events.push({ name: "agent_end", time: new Date().toISOString(), kwargs: { action }, }); - return this.onAgentEnd?.(run) ?? Promise.resolve(); + await this.onAgentEnd?.(run); } - handleRetrieverStart( + async handleRetrieverStart( retriever: Serialized, query: string, runId: string, @@ -486,18 +486,18 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags: tags || [], }; - return this._startTrace(run) - .then(() => this.onRetrieverStart?.(run)) - .then(() => run); + await this._startTrace(run); + await this.onRetrieverStart?.(run); + return run; } - handleRetrieverEnd( + async handleRetrieverEnd( documents: Document>[], runId: string ): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "retriever") { - return Promise.reject(new Error("No retriever run to end")); + throw new Error("No retriever run to end"); } run.end_time = Date.now(); run.outputs = { documents }; @@ -505,15 +505,15 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "end", time: new Date(run.end_time).toISOString(), }); - return (this.onRetrieverEnd?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onRetrieverEnd?.(run); + await this._endTrace(run); + return run; } - handleRetrieverError(error: unknown, runId: string): Promise { + async handleRetrieverError(error: unknown, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "retriever") { - return Promise.reject(new Error("No retriever run to end")); + throw new Error("No retriever run to end"); } run.end_time = Date.now(); run.error = this.stringifyError(error); @@ -521,25 +521,25 @@ export abstract class BaseTracer extends BaseCallbackHandler { name: "error", time: new Date(run.end_time).toISOString(), }); - return (this.onRetrieverError?.(run) ?? Promise.resolve()) - .then(() => this._endTrace(run)) - .then(() => run); + await this.onRetrieverError?.(run); + await this._endTrace(run); + return run; } - handleText(text: string, runId: string): Promise { + async handleText(text: string, runId: string): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "chain") { - return Promise.resolve(); + return; } run.events.push({ name: "text", time: new Date().toISOString(), kwargs: { text }, }); - return this.onText?.(run) ?? Promise.resolve(); + await this.onText?.(run); } - handleLLMNewToken( + async handleLLMNewToken( token: string, idx: NewTokenIndices, runId: string, @@ -549,8 +549,8 @@ export abstract class BaseTracer extends BaseCallbackHandler { ): Promise { const run = this.runMap.get(runId); if (!run || run?.run_type !== "llm") { - return Promise.reject( - new Error(`Invalid "runId" provided to "handleLLMNewToken" callback.`) + throw new Error( + `Invalid "runId" provided to "handleLLMNewToken" callback.` ); } run.events.push({ @@ -558,10 +558,8 @@ export abstract class BaseTracer extends BaseCallbackHandler { time: new Date().toISOString(), kwargs: { token, idx, chunk: fields?.chunk }, }); - return ( - this.onLLMNewToken?.(run, token, { chunk: fields?.chunk }) ?? 
- Promise.resolve() - ).then(() => run); + await this.onLLMNewToken?.(run, token, { chunk: fields?.chunk }); + return run; } // custom event handlers From 95fc4575d3789124d479ca63da6a969a21da63b4 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 19 Jul 2024 11:32:24 -0700 Subject: [PATCH 25/31] Adds sync methods for adding runs to run map for tracers --- langchain-core/src/callbacks/manager.ts | 316 ++++++++++++++---------- langchain-core/src/tracers/base.ts | 155 ++++++++++-- 2 files changed, 328 insertions(+), 143 deletions(-) diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 05033c8d568f..864cb83f0602 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -20,6 +20,7 @@ import { consumeCallback } from "./promises.js"; import { Serialized } from "../load/serializable.js"; import type { DocumentInterface } from "../documents/document.js"; import { isTracingEnabled } from "../utils/callbacks.js"; +import { isBaseTracer } from "../tracers/base.js"; if ( /* #__PURE__ */ isTracingEnabled() && @@ -607,31 +608,44 @@ export class CallbackManager const runId_ = idx === 0 && runId ? runId : uuidv4(); await Promise.all( - this.handlers.map((handler) => - consumeCallback(async () => { - if (!handler.ignoreLLM) { - try { - await handler.handleLLMStart?.( - llm, - [prompt], - runId_, - this._parentRunId, - extraParams, - this.tags, - this.metadata, - runName - ); - } catch (err) { - console.error( - `Error in handler ${handler.constructor.name}, handleLLMStart: ${err}` - ); - if (handler.raiseError) { - throw err; - } + this.handlers.map((handler) => { + if (handler.ignoreLLM) { + return; + } + if (isBaseTracer(handler)) { + handler._createRunForLLMStart( + llm, + [prompt], + runId_, + this._parentRunId, + extraParams, + this.tags, + this.metadata, + runName + ); + } + return consumeCallback(async () => { + try { + await handler.handleLLMStart?.( + llm, + [prompt], + runId_, + this._parentRunId, + extraParams, + this.tags, + this.metadata, + runName + ); + } catch (err) { + console.error( + `Error in handler ${handler.constructor.name}, handleLLMStart: ${err}` + ); + if (handler.raiseError) { + throw err; } } - }, handler.awaitHandlers) - ) + }, handler.awaitHandlers); + }) ); return new CallbackManagerForLLMRun( @@ -664,45 +678,58 @@ export class CallbackManager const runId_ = idx === 0 && runId ? 
runId : uuidv4(); await Promise.all( - this.handlers.map((handler) => - consumeCallback(async () => { - if (!handler.ignoreLLM) { - try { - if (handler.handleChatModelStart) { - await handler.handleChatModelStart?.( - llm, - [messageGroup], - runId_, - this._parentRunId, - extraParams, - this.tags, - this.metadata, - runName - ); - } else if (handler.handleLLMStart) { - const messageString = getBufferString(messageGroup); - await handler.handleLLMStart?.( - llm, - [messageString], - runId_, - this._parentRunId, - extraParams, - this.tags, - this.metadata, - runName - ); - } - } catch (err) { - console.error( - `Error in handler ${handler.constructor.name}, handleLLMStart: ${err}` + this.handlers.map((handler) => { + if (handler.ignoreLLM) { + return; + } + if (isBaseTracer(handler)) { + handler._createRunForChatModelStart( + llm, + [messageGroup], + runId_, + this._parentRunId, + extraParams, + this.tags, + this.metadata, + runName + ); + } + return consumeCallback(async () => { + try { + if (handler.handleChatModelStart) { + await handler.handleChatModelStart?.( + llm, + [messageGroup], + runId_, + this._parentRunId, + extraParams, + this.tags, + this.metadata, + runName + ); + } else if (handler.handleLLMStart) { + const messageString = getBufferString(messageGroup); + await handler.handleLLMStart?.( + llm, + [messageString], + runId_, + this._parentRunId, + extraParams, + this.tags, + this.metadata, + runName ); - if (handler.raiseError) { - throw err; - } + } + } catch (err) { + console.error( + `Error in handler ${handler.constructor.name}, handleLLMStart: ${err}` + ); + if (handler.raiseError) { + throw err; } } - }, handler.awaitHandlers) - ) + }, handler.awaitHandlers); + }) ); return new CallbackManagerForLLMRun( @@ -729,31 +756,44 @@ export class CallbackManager runName: string | undefined = undefined ): Promise { await Promise.all( - this.handlers.map((handler) => - consumeCallback(async () => { - if (!handler.ignoreChain) { - try { - await handler.handleChainStart?.( - chain, - inputs, - runId, - this._parentRunId, - this.tags, - this.metadata, - runType, - runName - ); - } catch (err) { - console.error( - `Error in handler ${handler.constructor.name}, handleChainStart: ${err}` - ); - if (handler.raiseError) { - throw err; - } + this.handlers.map((handler) => { + if (handler.ignoreChain) { + return; + } + if (isBaseTracer(handler)) { + handler._createRunForChainStart( + chain, + inputs, + runId, + this._parentRunId, + this.tags, + this.metadata, + runType, + runName + ); + } + return consumeCallback(async () => { + try { + await handler.handleChainStart?.( + chain, + inputs, + runId, + this._parentRunId, + this.tags, + this.metadata, + runType, + runName + ); + } catch (err) { + console.error( + `Error in handler ${handler.constructor.name}, handleChainStart: ${err}` + ); + if (handler.raiseError) { + throw err; } } - }, handler.awaitHandlers) - ) + }, handler.awaitHandlers); + }) ); return new CallbackManagerForChainRun( runId, @@ -777,30 +817,42 @@ export class CallbackManager runName: string | undefined = undefined ): Promise { await Promise.all( - this.handlers.map((handler) => - consumeCallback(async () => { - if (!handler.ignoreAgent) { - try { - await handler.handleToolStart?.( - tool, - input, - runId, - this._parentRunId, - this.tags, - this.metadata, - runName - ); - } catch (err) { - console.error( - `Error in handler ${handler.constructor.name}, handleToolStart: ${err}` - ); - if (handler.raiseError) { - throw err; - } + this.handlers.map((handler) => { + if 
(handler.ignoreAgent) { + return; + } + if (isBaseTracer(handler)) { + handler._createRunForToolStart( + tool, + input, + runId, + this._parentRunId, + this.tags, + this.metadata, + runName + ); + } + return consumeCallback(async () => { + try { + await handler.handleToolStart?.( + tool, + input, + runId, + this._parentRunId, + this.tags, + this.metadata, + runName + ); + } catch (err) { + console.error( + `Error in handler ${handler.constructor.name}, handleToolStart: ${err}` + ); + if (handler.raiseError) { + throw err; } } - }, handler.awaitHandlers) - ) + }, handler.awaitHandlers); + }) ); return new CallbackManagerForToolRun( runId, @@ -824,30 +876,42 @@ export class CallbackManager runName: string | undefined = undefined ): Promise { await Promise.all( - this.handlers.map((handler) => - consumeCallback(async () => { - if (!handler.ignoreRetriever) { - try { - await handler.handleRetrieverStart?.( - retriever, - query, - runId, - this._parentRunId, - this.tags, - this.metadata, - runName - ); - } catch (err) { - console.error( - `Error in handler ${handler.constructor.name}, handleRetrieverStart: ${err}` - ); - if (handler.raiseError) { - throw err; - } + this.handlers.map((handler) => { + if (handler.ignoreRetriever) { + return; + } + if (isBaseTracer(handler)) { + handler._createRunForRetrieverStart( + retriever, + query, + runId, + this._parentRunId, + this.tags, + this.metadata, + runName + ); + } + return consumeCallback(async () => { + try { + await handler.handleRetrieverStart?.( + retriever, + query, + runId, + this._parentRunId, + this.tags, + this.metadata, + runName + ); + } catch (err) { + console.error( + `Error in handler ${handler.constructor.name}, handleRetrieverStart: ${err}` + ); + if (handler.raiseError) { + throw err; } } - }, handler.awaitHandlers) - ) + }, handler.awaitHandlers); + }) ); return new CallbackManagerForRetrieverRun( runId, diff --git a/langchain-core/src/tracers/base.ts b/langchain-core/src/tracers/base.ts index 69cc098cdc55..b96aeb954616 100644 --- a/langchain-core/src/tracers/base.ts +++ b/langchain-core/src/tracers/base.ts @@ -60,6 +60,10 @@ function convertToDottedOrderFormat( ); } +export function isBaseTracer(x: BaseCallbackHandler): x is BaseTracer { + return typeof (x as BaseTracer)._addRunToRunMap === "function"; +} + export abstract class BaseTracer extends BaseCallbackHandler { protected runMap: Map = new Map(); @@ -90,7 +94,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { parentRun.child_runs.push(childRun); } - protected async _startTrace(run: Run) { + _addRunToRunMap(run: Run) { const currentDottedOrder = convertToDottedOrderFormat( run.start_time, run.id, @@ -126,7 +130,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { storedRun.dotted_order = currentDottedOrder; } this.runMap.set(storedRun.id, storedRun); - await this.onRunCreate?.(storedRun); + return storedRun; } protected async _endTrace(run: Run): Promise { @@ -154,7 +158,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { return parentRun.child_execution_order + 1; } - async handleLLMStart( + _createRunForLLMStart( llm: Serialized, prompts: string[], runId: string, @@ -163,7 +167,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags?: string[], metadata?: KVMap, name?: string - ): Promise { + ) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const finalExtraParams = metadata @@ -189,13 +193,37 @@ export abstract class BaseTracer extends BaseCallbackHandler { 
extra: finalExtraParams ?? {}, tags: tags || [], }; + return this._addRunToRunMap(run); + } - await this._startTrace(run); + async handleLLMStart( + llm: Serialized, + prompts: string[], + runId: string, + parentRunId?: string, + extraParams?: KVMap, + tags?: string[], + metadata?: KVMap, + name?: string + ): Promise { + const run = + this.runMap.get(runId) ?? + this._createRunForLLMStart( + llm, + prompts, + runId, + parentRunId, + extraParams, + tags, + metadata, + name + ); + await this.onRunCreate?.(run); await this.onLLMStart?.(run); return run; } - async handleChatModelStart( + _createRunForChatModelStart( llm: Serialized, messages: BaseMessage[][], runId: string, @@ -204,7 +232,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags?: string[], metadata?: KVMap, name?: string - ): Promise { + ) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const finalExtraParams = metadata @@ -230,8 +258,32 @@ export abstract class BaseTracer extends BaseCallbackHandler { extra: finalExtraParams ?? {}, tags: tags || [], }; + return this._addRunToRunMap(run); + } - await this._startTrace(run); + async handleChatModelStart( + llm: Serialized, + messages: BaseMessage[][], + runId: string, + parentRunId?: string, + extraParams?: KVMap, + tags?: string[], + metadata?: KVMap, + name?: string + ): Promise { + const run = + this.runMap.get(runId) ?? + this._createRunForChatModelStart( + llm, + messages, + runId, + parentRunId, + extraParams, + tags, + metadata, + name + ); + await this.onRunCreate?.(run); await this.onLLMStart?.(run); return run; } @@ -268,7 +320,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { return run; } - async handleChainStart( + _createRunForChainStart( chain: Serialized, inputs: ChainValues, runId: string, @@ -277,7 +329,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { metadata?: KVMap, runType?: string, name?: string - ): Promise { + ) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const run: Run = { @@ -300,7 +352,32 @@ export abstract class BaseTracer extends BaseCallbackHandler { extra: metadata ? { metadata } : {}, tags: tags || [], }; - await this._startTrace(run); + return this._addRunToRunMap(run); + } + + async handleChainStart( + chain: Serialized, + inputs: ChainValues, + runId: string, + parentRunId?: string, + tags?: string[], + metadata?: KVMap, + runType?: string, + name?: string + ): Promise { + const run = + this.runMap.get(runId) ?? + this._createRunForChainStart( + chain, + inputs, + runId, + parentRunId, + tags, + metadata, + runType, + name + ); + await this.onRunCreate?.(run); await this.onChainStart?.(run); return run; } @@ -355,7 +432,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { return run; } - async handleToolStart( + _createRunForToolStart( tool: Serialized, input: string, runId: string, @@ -363,7 +440,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags?: string[], metadata?: KVMap, name?: string - ): Promise { + ) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const run: Run = { @@ -386,8 +463,30 @@ export abstract class BaseTracer extends BaseCallbackHandler { extra: metadata ? 
{ metadata } : {}, tags: tags || [], }; + return this._addRunToRunMap(run); + } - await this._startTrace(run); + async handleToolStart( + tool: Serialized, + input: string, + runId: string, + parentRunId?: string, + tags?: string[], + metadata?: KVMap, + name?: string + ): Promise { + const run = + this.runMap.get(runId) ?? + this._createRunForToolStart( + tool, + input, + runId, + parentRunId, + tags, + metadata, + name + ); + await this.onRunCreate?.(run); await this.onToolStart?.(run); return run; } @@ -454,7 +553,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { await this.onAgentEnd?.(run); } - async handleRetrieverStart( + _createRunForRetrieverStart( retriever: Serialized, query: string, runId: string, @@ -462,7 +561,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { tags?: string[], metadata?: KVMap, name?: string - ): Promise { + ) { const execution_order = this._getExecutionOrder(parentRunId); const start_time = Date.now(); const run: Run = { @@ -485,8 +584,30 @@ export abstract class BaseTracer extends BaseCallbackHandler { extra: metadata ? { metadata } : {}, tags: tags || [], }; + return this._addRunToRunMap(run); + } - await this._startTrace(run); + async handleRetrieverStart( + retriever: Serialized, + query: string, + runId: string, + parentRunId?: string, + tags?: string[], + metadata?: KVMap, + name?: string + ): Promise { + let run = + this.runMap.get(runId) ?? + this._createRunForRetrieverStart( + retriever, + query, + runId, + parentRunId, + tags, + metadata, + name + ); + await this.onRunCreate?.(run); await this.onRetrieverStart?.(run); return run; } From 86d94aeac517a0c159a866c38a99ec79532118b8 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 19 Jul 2024 11:38:03 -0700 Subject: [PATCH 26/31] Docstrings, lint --- langchain-core/src/callbacks/manager.ts | 15 ++++++++++++++ langchain-core/src/tracers/base.ts | 27 ++++++++++++++++++++++++- 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/langchain-core/src/callbacks/manager.ts b/langchain-core/src/callbacks/manager.ts index 864cb83f0602..6e784c0f90f0 100644 --- a/langchain-core/src/callbacks/manager.ts +++ b/langchain-core/src/callbacks/manager.ts @@ -613,6 +613,9 @@ export class CallbackManager return; } if (isBaseTracer(handler)) { + // Create and add run to the run map. + // We do this synchronously to avoid race conditions + // when callbacks are backgrounded. handler._createRunForLLMStart( llm, [prompt], @@ -683,6 +686,9 @@ export class CallbackManager return; } if (isBaseTracer(handler)) { + // Create and add run to the run map. + // We do this synchronously to avoid race conditions + // when callbacks are backgrounded. handler._createRunForChatModelStart( llm, [messageGroup], @@ -761,6 +767,9 @@ export class CallbackManager return; } if (isBaseTracer(handler)) { + // Create and add run to the run map. + // We do this synchronously to avoid race conditions + // when callbacks are backgrounded. handler._createRunForChainStart( chain, inputs, @@ -822,6 +831,9 @@ export class CallbackManager return; } if (isBaseTracer(handler)) { + // Create and add run to the run map. + // We do this synchronously to avoid race conditions + // when callbacks are backgrounded. handler._createRunForToolStart( tool, input, @@ -881,6 +893,9 @@ export class CallbackManager return; } if (isBaseTracer(handler)) { + // Create and add run to the run map. + // We do this synchronously to avoid race conditions + // when callbacks are backgrounded. 
handler._createRunForRetrieverStart( retriever, query, diff --git a/langchain-core/src/tracers/base.ts b/langchain-core/src/tracers/base.ts index b96aeb954616..c9eb56821a6c 100644 --- a/langchain-core/src/tracers/base.ts +++ b/langchain-core/src/tracers/base.ts @@ -158,6 +158,11 @@ export abstract class BaseTracer extends BaseCallbackHandler { return parentRun.child_execution_order + 1; } + /** + * Create and add a run to the run map for LLM start events. + * This must sometimes be done synchronously to avoid race conditions + * when callbacks are backgrounded, so we expose it as a separate method here. + */ _createRunForLLMStart( llm: Serialized, prompts: string[], @@ -223,6 +228,11 @@ export abstract class BaseTracer extends BaseCallbackHandler { return run; } + /** + * Create and add a run to the run map for chat model start events. + * This must sometimes be done synchronously to avoid race conditions + * when callbacks are backgrounded, so we expose it as a separate method here. + */ _createRunForChatModelStart( llm: Serialized, messages: BaseMessage[][], @@ -320,6 +330,11 @@ export abstract class BaseTracer extends BaseCallbackHandler { return run; } + /** + * Create and add a run to the run map for chain start events. + * This must sometimes be done synchronously to avoid race conditions + * when callbacks are backgrounded, so we expose it as a separate method here. + */ _createRunForChainStart( chain: Serialized, inputs: ChainValues, @@ -432,6 +447,11 @@ export abstract class BaseTracer extends BaseCallbackHandler { return run; } + /** + * Create and add a run to the run map for tool start events. + * This must sometimes be done synchronously to avoid race conditions + * when callbacks are backgrounded, so we expose it as a separate method here. + */ _createRunForToolStart( tool: Serialized, input: string, @@ -553,6 +573,11 @@ export abstract class BaseTracer extends BaseCallbackHandler { await this.onAgentEnd?.(run); } + /** + * Create and add a run to the run map for retriever start events. + * This must sometimes be done synchronously to avoid race conditions + * when callbacks are backgrounded, so we expose it as a separate method here. + */ _createRunForRetrieverStart( retriever: Serialized, query: string, @@ -596,7 +621,7 @@ export abstract class BaseTracer extends BaseCallbackHandler { metadata?: KVMap, name?: string ): Promise { - let run = + const run = this.runMap.get(runId) ?? 
this._createRunForRetrieverStart( retriever, From bd75f551e6f07ea733887b0298bb209795683b59 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 19 Jul 2024 11:45:02 -0700 Subject: [PATCH 27/31] Fix test --- .../src/tracers/tests/langsmith_interop.test.ts | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/langchain-core/src/tracers/tests/langsmith_interop.test.ts b/langchain-core/src/tracers/tests/langsmith_interop.test.ts index 6ab27c0e8ec7..2f3ac9e8f43c 100644 --- a/langchain-core/src/tracers/tests/langsmith_interop.test.ts +++ b/langchain-core/src/tracers/tests/langsmith_interop.test.ts @@ -60,7 +60,7 @@ test.each(["true", "false"])( const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); expect(firstCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: firstCallParams.id, name: "RunnableLambda", start_time: expect.any(Number), @@ -93,7 +93,7 @@ test.each(["true", "false"])( dotted_order: expect.any(String), }); expect(secondCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: expect.any(String), name: "aiGreet", start_time: expect.any(Number), @@ -210,7 +210,7 @@ test.each(["true", "false"])( const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); expect(firstCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: firstCallParams.id, name: "RunnableLambda", start_time: expect.any(Number), @@ -232,7 +232,7 @@ test.each(["true", "false"])( dotted_order: expect.any(String), }); expect(secondCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: expect.any(String), name: "aiGreet", start_time: expect.any(Number), @@ -340,7 +340,7 @@ test.each(["true", "false"])( const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); expect(firstCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: firstCallParams.id, name: "aiGreet", start_time: expect.any(Number), @@ -368,7 +368,7 @@ test.each(["true", "false"])( tags: [], }); expect(secondCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: secondCallParams.id, name: "RunnableLambda", parent_run_id: firstCallParams.id, @@ -492,7 +492,7 @@ test.each(["true", "false"])( const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); expect(firstCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: firstCallParams.id, name: "aiGreet", start_time: expect.any(Number), @@ -520,7 +520,7 @@ test.each(["true", "false"])( tags: [], }); expect(secondCallParams).toEqual({ - session_name: expect.any(String), + session_name: expect.anything() || undefined, id: secondCallParams.id, name: "RunnableLambda", parent_run_id: firstCallParams.id, From 2a7ae7773d9af244199f283e367e7a53b6b875ad Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 19 Jul 2024 11:51:02 -0700 Subject: [PATCH 28/31] Relax test --- .../tracers/tests/langsmith_interop.test.ts | 40 ++++++++----------- 1 file 
changed, 16 insertions(+), 24 deletions(-) diff --git a/langchain-core/src/tracers/tests/langsmith_interop.test.ts b/langchain-core/src/tracers/tests/langsmith_interop.test.ts index 2f3ac9e8f43c..61fcb7fef322 100644 --- a/langchain-core/src/tracers/tests/langsmith_interop.test.ts +++ b/langchain-core/src/tracers/tests/langsmith_interop.test.ts @@ -59,8 +59,7 @@ test.each(["true", "false"])( const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); - expect(firstCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "RunnableLambda", start_time: expect.any(Number), @@ -92,8 +91,7 @@ test.each(["true", "false"])( trace_id: firstCallParams.id, dotted_order: expect.any(String), }); - expect(secondCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(secondCallParams).toMatchObject({ id: expect.any(String), name: "aiGreet", start_time: expect.any(Number), @@ -121,7 +119,7 @@ test.each(["true", "false"])( dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), tags: [], }); - expect(thirdCallParams).toEqual({ + expect(thirdCallParams).toMatchObject({ end_time: expect.any(Number), inputs: { args: [ @@ -145,7 +143,7 @@ test.each(["true", "false"])( trace_id: firstCallParams.id, tags: [], }); - expect(fourthCallParams).toEqual({ + expect(fourthCallParams).toMatchObject({ end_time: expect.any(Number), outputs: { output: ["Hello!David"] }, events: [ @@ -209,8 +207,7 @@ test.each(["true", "false"])( const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); - expect(firstCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "RunnableLambda", start_time: expect.any(Number), @@ -231,8 +228,7 @@ test.each(["true", "false"])( trace_id: firstCallParams.id, dotted_order: expect.any(String), }); - expect(secondCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(secondCallParams).toMatchObject({ id: expect.any(String), name: "aiGreet", start_time: expect.any(Number), @@ -260,7 +256,7 @@ test.each(["true", "false"])( dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), tags: [], }); - expect(thirdCallParams).toEqual({ + expect(thirdCallParams).toMatchObject({ end_time: expect.any(Number), inputs: { args: [ @@ -286,7 +282,7 @@ test.each(["true", "false"])( trace_id: firstCallParams.id, tags: [], }); - expect(fourthCallParams).toEqual({ + expect(fourthCallParams).toMatchObject({ end_time: expect.any(Number), outputs: { output: "Hello!David" }, events: [ @@ -339,8 +335,7 @@ test.each(["true", "false"])( const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); - expect(firstCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "aiGreet", start_time: expect.any(Number), @@ -367,8 +362,7 @@ test.each(["true", "false"])( dotted_order: 
firstCallParams.dotted_order, tags: [], }); - expect(secondCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(secondCallParams).toMatchObject({ id: secondCallParams.id, name: "RunnableLambda", parent_run_id: firstCallParams.id, @@ -401,7 +395,7 @@ test.each(["true", "false"])( trace_id: firstCallParams.id, dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), }); - expect(thirdCallParams).toEqual({ + expect(thirdCallParams).toMatchObject({ end_time: expect.any(Number), outputs: { output: ["Hello!"] }, events: [ @@ -426,7 +420,7 @@ test.each(["true", "false"])( dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), parent_run_id: firstCallParams.id, }); - expect(fourthCallParams).toEqual({ + expect(fourthCallParams).toMatchObject({ end_time: expect.any(Number), inputs: { args: [ @@ -491,8 +485,7 @@ test.each(["true", "false"])( const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); - expect(firstCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "aiGreet", start_time: expect.any(Number), @@ -519,8 +512,7 @@ test.each(["true", "false"])( dotted_order: firstCallParams.dotted_order, tags: [], }); - expect(secondCallParams).toEqual({ - session_name: expect.anything() || undefined, + expect(secondCallParams).toMatchObject({ id: secondCallParams.id, name: "RunnableLambda", parent_run_id: firstCallParams.id, @@ -542,7 +534,7 @@ test.each(["true", "false"])( trace_id: firstCallParams.id, dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), }); - expect(thirdCallParams).toEqual({ + expect(thirdCallParams).toMatchObject({ end_time: expect.any(Number), outputs: { output: "Hello!" 
}, events: [ @@ -567,7 +559,7 @@ test.each(["true", "false"])( dotted_order: expect.stringContaining(`${firstCallParams.dotted_order}.`), parent_run_id: firstCallParams.id, }); - expect(fourthCallParams).toEqual({ + expect(fourthCallParams).toMatchObject({ end_time: expect.any(Number), inputs: { args: [ From 609851fd2f008ad4b7d49bc06d2054a4c3a95781 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 19 Jul 2024 17:48:21 -0700 Subject: [PATCH 29/31] Fix tracing for streaming calls with no config --- langchain-core/src/runnables/iter.ts | 6 ++++-- langchain-core/src/singletons/index.ts | 21 +++++++++++++++------ langchain-core/src/utils/stream.ts | 6 ++++-- 3 files changed, 23 insertions(+), 10 deletions(-) diff --git a/langchain-core/src/runnables/iter.ts b/langchain-core/src/runnables/iter.ts index 32434a648399..52b7a61db06a 100644 --- a/langchain-core/src/runnables/iter.ts +++ b/langchain-core/src/runnables/iter.ts @@ -37,7 +37,8 @@ export function* consumeIteratorInContext( while (true) { const { value, done } = AsyncLocalStorageProviderSingleton.runWithConfig( context, - iter.next.bind(iter) + iter.next.bind(iter), + true ); if (done) { break; @@ -56,7 +57,8 @@ export async function* consumeAsyncIterableInContext( const { value, done } = await AsyncLocalStorageProviderSingleton.runWithConfig( context, - iterator.next.bind(iter) + iterator.next.bind(iter), + true ); if (done) { break; diff --git a/langchain-core/src/singletons/index.ts b/langchain-core/src/singletons/index.ts index f1d6801431a0..3b4740591e50 100644 --- a/langchain-core/src/singletons/index.ts +++ b/langchain-core/src/singletons/index.ts @@ -32,12 +32,16 @@ class AsyncLocalStorageProvider { getRunnableConfig() { const storage = this.getInstance(); // this has the runnable config - // which means that I should also have an instance of a LangChainTracer + // which means that we should also have an instance of a LangChainTracer // with the run map prepopulated return storage.getStore()?.extra?.[LC_CHILD_KEY]; } - runWithConfig(config: any, callback: () => T): T { + runWithConfig( + config: any, + callback: () => T, + avoidCreatingRootRunTree?: boolean + ): T { const callbackManager = CallbackManager._configureSync( config?.callbacks, undefined, @@ -52,10 +56,15 @@ class AsyncLocalStorageProvider { (handler) => handler?.name === "langchain_tracer" ) as LangChainTracer | undefined; - const runTree = - langChainTracer && parentRunId - ? 
langChainTracer.convertToRunTree(parentRunId) - : new RunTree({ name: "", tracingEnabled: false }); + let runTree; + if (langChainTracer && parentRunId) { + runTree = langChainTracer.convertToRunTree(parentRunId); + } else if (!avoidCreatingRootRunTree) { + runTree = new RunTree({ + name: "", + tracingEnabled: false, + }); + } if (runTree) { runTree.extra = { ...runTree.extra, [LC_CHILD_KEY]: config }; diff --git a/langchain-core/src/utils/stream.ts b/langchain-core/src/utils/stream.ts index beb656b620a5..234cec3b900f 100644 --- a/langchain-core/src/utils/stream.ts +++ b/langchain-core/src/utils/stream.ts @@ -211,7 +211,8 @@ export class AsyncGeneratorWithSetup< } else { this.firstResult.then((_result) => resolve(undefined as S), reject); } - } + }, + true ); }); } @@ -226,7 +227,8 @@ export class AsyncGeneratorWithSetup< this.config, async () => { return this.generator.next(...args); - } + }, + true ); } From d643a62c4dc013d3dcdffafcad88cb5ec096b0b2 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 21 Jul 2024 18:00:27 -0700 Subject: [PATCH 30/31] Adds a proper tracing test for tracers initalized from env vars --- .../tracers/tests/langsmith_interop.test.ts | 124 ++++++++++-------- 1 file changed, 66 insertions(+), 58 deletions(-) diff --git a/langchain-core/src/tracers/tests/langsmith_interop.test.ts b/langchain-core/src/tracers/tests/langsmith_interop.test.ts index 61fcb7fef322..4fd4f123fd97 100644 --- a/langchain-core/src/tracers/tests/langsmith_interop.test.ts +++ b/langchain-core/src/tracers/tests/langsmith_interop.test.ts @@ -1,46 +1,45 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable no-process-env */ +/* eslint-disable no-promise-executor-return */ import { jest } from "@jest/globals"; -import { Client } from "langsmith"; import { traceable } from "langsmith/traceable"; import { RunnableLambda } from "../../runnables/base.js"; import { BaseMessage, HumanMessage } from "../../messages/index.js"; -import { LangChainTracer } from "../tracer_langchain.js"; -type ClientParams = Exclude[0], undefined>; +let fetchMock: any; -const mockClient = (config?: Omit) => { - const client = new Client({ - ...config, - apiKey: "MOCK", - autoBatchTracing: false, - }); - const callSpy = jest - .spyOn((client as any).caller, "call") - .mockResolvedValue({ ok: true, text: () => "" }); +const originalTracingEnvValue = process.env.LANGCHAIN_TRACING_V2; - const langChainTracer = new LangChainTracer({ - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore Overriden client - client, - }); +beforeEach(() => { + fetchMock = jest + .spyOn(global, "fetch") + .mockImplementation(() => + Promise.resolve({ ok: true, text: () => "" } as any) + ); + process.env.LANGCHAIN_TRACING_V2 = "true"; +}); + +afterEach(() => { + jest.restoreAllMocks(); +}); - return { client, callSpy, langChainTracer }; -}; +afterAll(() => { + process.env.LANGCHAIN_TRACING_V2 = originalTracingEnvValue; +}); test.each(["true", "false"])( "traceables nested within runnables with background callbacks %s", async (value) => { process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; - const { callSpy, langChainTracer: tracer } = mockClient(); const aiGreet = traceable( async (msg: BaseMessage, name = "world") => { + await new Promise((resolve) => setTimeout(resolve, 300)); return msg.content + name; }, - { name: "aiGreet" } + { name: "aiGreet", tracingEnabled: true } ); const root = RunnableLambda.from(async (messages: BaseMessage[]) => { @@ -50,15 +49,17 @@ test.each(["true", "false"])( return 
[greetOne]; }); - await root.invoke([new HumanMessage({ content: "Hello!" })], { - callbacks: [tracer], + await root.invoke([new HumanMessage({ content: "Hello!" })]); + + const relevantCalls = fetchMock.mock.calls.filter((call: any) => { + return call[0].startsWith("https://api.smith.langchain.com/runs"); }); - expect(callSpy.mock.calls.length).toEqual(4); - const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); - const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); - const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); - const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + expect(relevantCalls.length).toEqual(4); + const firstCallParams = JSON.parse((relevantCalls[0][1] as any).body); + const secondCallParams = JSON.parse((relevantCalls[1][1] as any).body); + const thirdCallParams = JSON.parse((relevantCalls[2][1] as any).body); + const fourthCallParams = JSON.parse((relevantCalls[3][1] as any).body); expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "RunnableLambda", @@ -174,11 +175,11 @@ test.each(["true", "false"])( "streaming traceables nested within runnables with background callbacks %s", async (value) => { process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; - const { callSpy, langChainTracer: tracer } = mockClient(); const aiGreet = traceable( async function* (msg: BaseMessage, name = "world") { const res = msg.content + name; + await new Promise((resolve) => setTimeout(resolve, 300)); for (const letter of res.split("")) { yield letter; } @@ -191,22 +192,21 @@ test.each(["true", "false"])( yield* aiGreet(lastMsg, "David"); }); - const stream = await root.stream( - [new HumanMessage({ content: "Hello!" })], - { - callbacks: [tracer], - } - ); + const stream = await root.stream([new HumanMessage({ content: "Hello!" 
})]); - for await (const chunk of stream) { - console.log(chunk); + for await (const _ of stream) { + // Just consume iterator } - expect(callSpy.mock.calls.length).toEqual(4); - const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); - const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); - const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); - const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + const relevantCalls = fetchMock.mock.calls.filter((call: any) => { + return call[0].startsWith("https://api.smith.langchain.com/runs"); + }); + + expect(relevantCalls.length).toEqual(4); + const firstCallParams = JSON.parse((relevantCalls[0][1] as any).body); + const secondCallParams = JSON.parse((relevantCalls[1][1] as any).body); + const thirdCallParams = JSON.parse((relevantCalls[2][1] as any).body); + const fourthCallParams = JSON.parse((relevantCalls[3][1] as any).body); expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "RunnableLambda", @@ -313,10 +313,10 @@ test.each(["true", "false"])( "runnables nested within traceables with background callbacks %s", async (value) => { process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; - const { client, callSpy } = mockClient(); const nested = RunnableLambda.from(async (messages: BaseMessage[]) => { const lastMsg = messages.at(-1) as HumanMessage; + await new Promise((resolve) => setTimeout(resolve, 300)); return [lastMsg.content]; }); @@ -325,16 +325,20 @@ test.each(["true", "false"])( const contents = await nested.invoke([msg]); return contents[0] + name; }, - { name: "aiGreet", client, tracingEnabled: true } + { name: "aiGreet", tracingEnabled: true } ); await aiGreet(new HumanMessage({ content: "Hello!" }), "mitochondria"); - expect(callSpy.mock.calls.length).toEqual(4); - const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); - const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); - const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); - const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + const relevantCalls = fetchMock.mock.calls.filter((call: any) => { + return call[0].startsWith("https://api.smith.langchain.com/runs"); + }); + + expect(relevantCalls.length).toEqual(4); + const firstCallParams = JSON.parse((relevantCalls[0][1] as any).body); + const secondCallParams = JSON.parse((relevantCalls[1][1] as any).body); + const thirdCallParams = JSON.parse((relevantCalls[2][1] as any).body); + const fourthCallParams = JSON.parse((relevantCalls[3][1] as any).body); expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "aiGreet", @@ -450,12 +454,12 @@ test.each(["true", "false"])( "streaming runnables nested within traceables with background callbacks %s", async (value) => { process.env.LANGCHAIN_CALLBACKS_BACKGROUND = value; - const { client, callSpy } = mockClient(); const nested = RunnableLambda.from(async function* ( messages: BaseMessage[] ) { const lastMsg = messages.at(-1) as HumanMessage; + await new Promise((resolve) => setTimeout(resolve, 300)); for (const letter of (lastMsg.content as string).split("")) { yield letter; } @@ -470,21 +474,25 @@ test.each(["true", "false"])( yield letter; } }, - { name: "aiGreet", client, tracingEnabled: true } + { name: "aiGreet", tracingEnabled: true } ); - for await (const chunk of aiGreet( + for await (const _ of aiGreet( new HumanMessage({ content: "Hello!" 
}), "mitochondria" )) { - console.log(chunk); + // Just consume iterator } - expect(callSpy.mock.calls.length).toEqual(4); - const firstCallParams = JSON.parse((callSpy.mock.calls[0][2] as any).body); - const secondCallParams = JSON.parse((callSpy.mock.calls[1][2] as any).body); - const thirdCallParams = JSON.parse((callSpy.mock.calls[2][2] as any).body); - const fourthCallParams = JSON.parse((callSpy.mock.calls[3][2] as any).body); + const relevantCalls = fetchMock.mock.calls.filter((call: any) => { + return call[0].startsWith("https://api.smith.langchain.com/runs"); + }); + + expect(relevantCalls.length).toEqual(4); + const firstCallParams = JSON.parse((relevantCalls[0][1] as any).body); + const secondCallParams = JSON.parse((relevantCalls[1][1] as any).body); + const thirdCallParams = JSON.parse((relevantCalls[2][1] as any).body); + const fourthCallParams = JSON.parse((relevantCalls[3][1] as any).body); expect(firstCallParams).toMatchObject({ id: firstCallParams.id, name: "aiGreet", From 22254ea9638c31037f764cb5f4ae94c914ebe507 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Sun, 21 Jul 2024 18:03:01 -0700 Subject: [PATCH 31/31] Bump LangSmith version --- langchain-core/package.json | 2 +- package.json | 1 - yarn.lock | 10 +++++----- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/langchain-core/package.json b/langchain-core/package.json index 8efc8c4bb3d1..ad9a7de938a9 100644 --- a/langchain-core/package.json +++ b/langchain-core/package.json @@ -45,7 +45,7 @@ "camelcase": "6", "decamelize": "1.2.0", "js-tiktoken": "^1.0.12", - "langsmith": "~0.1.30", + "langsmith": "~0.1.39", "ml-distance": "^4.0.0", "mustache": "^4.2.0", "p-queue": "^6.6.2", diff --git a/package.json b/package.json index a8619af3bd41..7c45191ba7be 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,6 @@ "voy-search@0.6.2": "patch:voy-search@npm%3A0.6.2#./.yarn/patches/voy-search-npm-0.6.2-d4aca30a0e.patch", "@langchain/core": "workspace:*", "better-sqlite3": "9.4.0", - "langsmith": "0.1.39-rc.0", "zod": "3.23.8" }, "lint-staged": { diff --git a/yarn.lock b/yarn.lock index 36efa39ae038..220b0fd245ba 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11033,7 +11033,7 @@ __metadata: jest: ^29.5.0 jest-environment-node: ^29.6.4 js-tiktoken: ^1.0.12 - langsmith: ~0.1.30 + langsmith: ~0.1.39 ml-distance: ^4.0.0 ml-matrix: ^6.10.4 mustache: ^4.2.0 @@ -30361,9 +30361,9 @@ __metadata: languageName: unknown linkType: soft -"langsmith@npm:0.1.39-rc.0": - version: 0.1.39-rc.0 - resolution: "langsmith@npm:0.1.39-rc.0" +"langsmith@npm:^0.1.30, langsmith@npm:~0.1.30, langsmith@npm:~0.1.39": + version: 0.1.39 + resolution: "langsmith@npm:0.1.39" dependencies: "@types/uuid": ^9.0.1 commander: ^10.0.1 @@ -30381,7 +30381,7 @@ __metadata: optional: true openai: optional: true - checksum: ab57ce54075fb80d55d230dd3b70f8824733f21b1c5ee52df1ded7671ab51fb406a4235aa0679131bfefe53e1da5a408665c95643ba719f87b0680ee4c4e00d5 + checksum: df21332662ec3a2d2d5cf915acede52b96aedf2a286259435d683f230af5926500b129cab1f0275450e0d3de6d9d8476e410ac46f5e994beb43f2e2df8a1965f languageName: node linkType: hard
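For reference, below is a minimal standalone sketch (not part of the patch series) of the interop scenario these changes and tests target: a LangChain RunnableLambda nested inside a LangSmith traceable, with tracing configured purely from environment variables and callbacks running in the background. The "@langchain/core/runnables" and "@langchain/core/messages" entrypoints are assumed here to keep the example self-contained; the in-repo tests above use relative imports instead.

import { traceable } from "langsmith/traceable";
import { RunnableLambda } from "@langchain/core/runnables";
import { HumanMessage, type BaseMessage } from "@langchain/core/messages";

// Tracing is driven entirely by environment variables, mirroring the test
// setup in PATCH 30/31; no explicit LangChainTracer or Client is constructed.
process.env.LANGCHAIN_TRACING_V2 = "true";
process.env.LANGCHAIN_CALLBACKS_BACKGROUND = "true";

// A runnable nested inside a traceable. Because tracers now add runs to their
// run map synchronously (the _createRunFor*Start methods from PATCH 25/31),
// the nested run should be recorded as a child of the "aiGreet" run (same
// trace_id, dotted_order prefixed by the parent's) even though the tracer's
// network callbacks are backgrounded.
const nested = RunnableLambda.from(async (messages: BaseMessage[]) => {
  const lastMsg = messages.at(-1) as HumanMessage;
  return [lastMsg.content];
});

const aiGreet = traceable(
  async (msg: BaseMessage, name = "world") => {
    const contents = await nested.invoke([msg]);
    return contents[0] + name;
  },
  { name: "aiGreet", tracingEnabled: true }
);

async function main() {
  const res = await aiGreet(new HumanMessage({ content: "Hello!" }), "world");
  console.log(res); // "Hello!world"
}

void main();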