feat(vercel): add OTEL based LangSmith trace exporter (#1104)
Add OTEL-based LangSmith trace exporter, conforming to Vercel AI SDK Telemetry documentation.

TODO:
- [x] Add unit tests with mock results and mock language model to assert the outputs as well
- [x] Verify how wrapAISDKModel would work with LangSmithAISDKExporter
- [x] Add `first_token_time`
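For context, here is a minimal consumer-side sketch of how the new exporter gets wired up, distilled from the integration test added below. The `langsmith` and `langsmith/vercel` import specifiers follow from the new `./vercel` entry added to `package.json` exports; the prompt, run name, and metadata values are placeholders, not part of this commit.

```ts
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
import { BatchSpanProcessor } from "@opentelemetry/sdk-trace-base";
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
import { Client } from "langsmith";
import { AISDKExporter } from "langsmith/vercel";

// Forward Vercel AI SDK telemetry spans to LangSmith through an OTEL span
// processor, mirroring the provider setup used in the integration test below.
const provider = new NodeTracerProvider();
provider.addSpanProcessor(
  new BatchSpanProcessor(new AISDKExporter({ client: new Client() }))
);
provider.register();

async function main() {
  // Opt each AI SDK call into telemetry via the exporter's settings helper.
  const { text } = await generateText({
    model: openai("gpt-4o-mini"),
    prompt: "Say hello", // placeholder prompt
    experimental_telemetry: AISDKExporter.getSettings({
      runName: "quickstart", // placeholder run name
      metadata: { userId: "123" },
    }),
  });
  console.log(text);
}

main();
```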
Showing 11 changed files with 2,468 additions and 67 deletions.
@@ -1,6 +1,6 @@
 {
   "name": "langsmith",
-  "version": "0.2.0",
+  "version": "0.2.1",
   "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
   "packageManager": "[email protected]",
   "files": [
@@ -33,6 +33,10 @@
     "langchain.js",
     "langchain.d.ts",
     "langchain.d.cts",
+    "vercel.cjs",
+    "vercel.js",
+    "vercel.d.ts",
+    "vercel.d.cts",
     "wrappers.cjs",
     "wrappers.js",
     "wrappers.d.ts",
@@ -105,18 +109,20 @@
     "uuid": "^10.0.0"
   },
   "devDependencies": {
-    "@ai-sdk/openai": "^0.0.40",
+    "@ai-sdk/openai": "^0.0.68",
     "@babel/preset-env": "^7.22.4",
     "@faker-js/faker": "^8.4.1",
     "@jest/globals": "^29.5.0",
     "@langchain/core": "^0.3.14",
     "@langchain/langgraph": "^0.2.18",
     "@langchain/openai": "^0.3.11",
+    "@opentelemetry/sdk-trace-base": "^1.26.0",
+    "@opentelemetry/sdk-trace-node": "^1.26.0",
     "@tsconfig/recommended": "^1.0.2",
     "@types/jest": "^29.5.1",
     "@typescript-eslint/eslint-plugin": "^5.59.8",
     "@typescript-eslint/parser": "^5.59.8",
-    "ai": "^3.2.37",
+    "ai": "^3.4.17",
     "babel-jest": "^29.5.0",
     "cross-env": "^7.0.3",
     "dotenv": "^16.1.3",
@@ -221,6 +227,15 @@
       "import": "./langchain.js",
       "require": "./langchain.cjs"
     },
+    "./vercel": {
+      "types": {
+        "import": "./vercel.d.ts",
+        "require": "./vercel.d.cts",
+        "default": "./vercel.d.ts"
+      },
+      "import": "./vercel.js",
+      "require": "./vercel.cjs"
+    },
     "./wrappers": {
       "types": {
         "import": "./wrappers.d.ts",
@@ -0,0 +1,253 @@
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
import { BatchSpanProcessor } from "@opentelemetry/sdk-trace-base";

import {
  generateText,
  streamText,
  generateObject,
  streamObject,
  tool,
} from "ai";
import { openai } from "@ai-sdk/openai";

import { v4 as uuid } from "uuid";
import { z } from "zod";
import { AISDKExporter } from "../vercel.js";
import { Client } from "../index.js";
import { traceable } from "../traceable.js";
import { waitUntilRunFound, toArray } from "./utils.js";

const client = new Client();
// Not using @opentelemetry/sdk-node because we need to force flush
// the spans to ensure they are sent to LangSmith between tests
const provider = new NodeTracerProvider();
provider.addSpanProcessor(
  new BatchSpanProcessor(new AISDKExporter({ client }))
);
provider.register();

test("generateText", async () => {
  const runId = uuid();

  await generateText({
    model: openai("gpt-4o-mini"),
    messages: [
      {
        role: "user",
        content: "What are my orders and where are they? My user ID is 123",
      },
    ],
    tools: {
      listOrders: tool({
        description: "list all orders",
        parameters: z.object({ userId: z.string() }),
        execute: async ({ userId }) =>
          `User ${userId} has the following orders: 1`,
      }),
      viewTrackingInformation: tool({
        description: "view tracking information for a specific order",
        parameters: z.object({ orderId: z.string() }),
        execute: async ({ orderId }) =>
          `Here is the tracking information for ${orderId}`,
      }),
    },
    experimental_telemetry: AISDKExporter.getSettings({
      runId,
      functionId: "functionId",
      metadata: { userId: "123", language: "english" },
    }),
    maxSteps: 10,
  });

  await provider.forceFlush();
  await waitUntilRunFound(client, runId, true);

  const storedRun = await client.readRun(runId);
  expect(storedRun.id).toEqual(runId);
});

test("generateText with image", async () => {
  const runId = uuid();
  await generateText({
    model: openai("gpt-4o-mini"),
    messages: [
      {
        role: "user",
        content: [
          {
            type: "text",
            text: "What's in this picture?",
          },
          {
            type: "image",
            image: new URL("https://picsum.photos/200/300"),
          },
        ],
      },
    ],
    experimental_telemetry: AISDKExporter.getSettings({
      runId,
      runName: "vercelImageTest",
      functionId: "functionId",
      metadata: { userId: "123", language: "english" },
    }),
  });

  await provider.forceFlush();
  await waitUntilRunFound(client, runId, true);

  const storedRun = await client.readRun(runId);
  expect(storedRun.id).toEqual(runId);
});

test("streamText", async () => {
  const runId = uuid();
  const result = await streamText({
    model: openai("gpt-4o-mini"),
    messages: [
      {
        role: "user",
        content: "What are my orders and where are they? My user ID is 123",
      },
    ],
    tools: {
      listOrders: tool({
        description: "list all orders",
        parameters: z.object({ userId: z.string() }),
        execute: async ({ userId }) =>
          `User ${userId} has the following orders: 1`,
      }),
      viewTrackingInformation: tool({
        description: "view tracking information for a specific order",
        parameters: z.object({ orderId: z.string() }),
        execute: async ({ orderId }) =>
          `Here is the tracking information for ${orderId}`,
      }),
    },
    experimental_telemetry: AISDKExporter.getSettings({
      runId,
      functionId: "functionId",
      metadata: { userId: "123", language: "english" },
    }),
    maxSteps: 10,
  });

  await toArray(result.fullStream);
  await provider.forceFlush();
  await waitUntilRunFound(client, runId, true);

  const storedRun = await client.readRun(runId);
  expect(storedRun.id).toEqual(runId);
});

test("generateObject", async () => {
  const runId = uuid();
  await generateObject({
    model: openai("gpt-4o-mini", { structuredOutputs: true }),
    schema: z.object({
      weather: z.object({
        city: z.string(),
        unit: z.union([z.literal("celsius"), z.literal("fahrenheit")]),
      }),
    }),
    prompt: "What's the weather in Prague?",
    experimental_telemetry: AISDKExporter.getSettings({
      runId,
      functionId: "functionId",
      metadata: { userId: "123", language: "english" },
    }),
  });

  await provider.forceFlush();
  await waitUntilRunFound(client, runId, true);

  const storedRun = await client.readRun(runId);
  expect(storedRun.id).toEqual(runId);
});

test("streamObject", async () => {
  const runId = uuid();
  const result = await streamObject({
    model: openai("gpt-4o-mini", { structuredOutputs: true }),
    schema: z.object({
      weather: z.object({
        city: z.string(),
        unit: z.union([z.literal("celsius"), z.literal("fahrenheit")]),
      }),
    }),
    prompt: "What's the weather in Prague?",
    experimental_telemetry: AISDKExporter.getSettings({
      runId,
      functionId: "functionId",
      metadata: {
        userId: "123",
        language: "english",
      },
    }),
  });

  await toArray(result.partialObjectStream);
  await provider.forceFlush();
  await waitUntilRunFound(client, runId, true);

  const storedRun = await client.readRun(runId);
  expect(storedRun.id).toEqual(runId);
});

test("traceable", async () => {
  const runId = uuid();

  const wrappedText = traceable(
    async (content: string) => {
      const { text } = await generateText({
        model: openai("gpt-4o-mini"),
        messages: [{ role: "user", content }],
        tools: {
          listOrders: tool({
            description: "list all orders",
            parameters: z.object({ userId: z.string() }),
            execute: async ({ userId }) =>
              `User ${userId} has the following orders: 1`,
          }),
          viewTrackingInformation: tool({
            description: "view tracking information for a specific order",
            parameters: z.object({ orderId: z.string() }),
            execute: async ({ orderId }) =>
              `Here is the tracking information for ${orderId}`,
          }),
        },
        experimental_telemetry: AISDKExporter.getSettings({
          functionId: "functionId",
          runName: "nestedVercelTrace",
          metadata: { userId: "123", language: "english" },
        }),
        maxSteps: 10,
      });

      const foo = traceable(
        async () => {
          return "bar";
        },
        {
          name: "foo",
        }
      );

      await foo();

      return { text };
    },
    { name: "parentTraceable", id: runId }
  );

  const result = await wrappedText(
    "What are my orders and where are they? My user ID is 123. Use available tools."
  );
  await waitUntilRunFound(client, runId, true);
  const storedRun = await client.readRun(runId);
  expect(storedRun.outputs).toEqual(result);
});

afterAll(async () => {
  await provider.shutdown();
});