fix(js): More deeply copy OpenAI SDK when wrapping (#1081)
This fixes tracing of `openai.beta.chat.completions.parse`. There were
issues caused by OpenAI SDK methods internally calling other methods on the same client.
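To make the motivation concrete, here is a minimal TypeScript sketch of the failure mode the copy avoids. The `completions` object, `withTracing` helper, and method names below are hypothetical stand-ins, not the real OpenAI SDK or LangSmith API: when `parse` delegates to a sibling `create` internally, patching `create` on the original object would also trace the internal call, whereas wrapping a copy leaves the original methods untouched and only exposes traced entry points to user code.

// Minimal sketch (hypothetical names, not the real OpenAI SDK): `parse`
// delegates to `create` internally, so patching `create` in place would
// also trace that internal call.
type Fn = (...args: unknown[]) => unknown;

const withTracing =
  (fn: Fn, name: string): Fn =>
  (...args) => {
    console.log(`[trace] ${name}`);
    return fn(...args);
  };

const completions = {
  create(params: { model: string }) {
    return { model: params.model, parsed: false };
  },
  // Like `beta.chat.completions.parse`, this calls a sibling method internally.
  parse(params: { model: string }) {
    return { ...this.create(params), parsed: true };
  },
};

// In-place patching (the old approach) would make the internal call above hit
// the traced `create` and record an extra, unwanted run:
//   completions.create = withTracing(completions.create.bind(completions), "create");

// Copy-based wrapping: the original object keeps its untraced methods, and the
// copy exposes traced entry points for user code.
const traced = {
  ...completions,
  create: withTracing(completions.create.bind(completions), "create"),
  parse: withTracing(completions.parse.bind(completions), "parse"),
};

traced.parse({ model: "gpt-4o-mini" }); // logs "[trace] parse" exactly once
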
jacoblee93 authored Oct 9, 2024
1 parent f034c38 commit 550b28d
Showing 6 changed files with 154 additions and 52 deletions.
4 changes: 2 additions & 2 deletions js/package.json
@@ -1,6 +1,6 @@
{
"name": "langsmith",
"version": "0.1.62",
"version": "0.1.63",
"description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
"packageManager": "[email protected]",
"files": [
@@ -127,7 +127,7 @@
"eslint-plugin-prettier": "^4.2.1",
"jest": "^29.5.0",
"langchain": "^0.3.2",
"openai": "^4.38.5",
"openai": "^4.67.3",
"prettier": "^2.8.8",
"ts-jest": "^29.1.0",
"ts-node": "^10.9.1",
2 changes: 1 addition & 1 deletion js/src/index.ts
@@ -14,4 +14,4 @@ export { RunTree, type RunTreeConfig } from "./run_trees.js";
export { overrideFetchImplementation } from "./singletons/fetch.js";

// Update using yarn bump-version
export const __version__ = "0.1.62";
export const __version__ = "0.1.63";
4 changes: 2 additions & 2 deletions js/src/tests/traceable.test.ts
@@ -589,7 +589,7 @@ describe("async generators", () => {
);

const numbers: number[] = [];
for await (const num of await stream()) {
for await (const num of (await stream()) as unknown as AsyncGenerator<number>) {
numbers.push(num);
}

@@ -719,7 +719,7 @@ describe("deferred input", () => {
const { client, callSpy } = mockClient();
const parrotStream = traceable(
async function* parrotStream(input: ReadableStream<string>) {
for await (const token of input) {
for await (const token of input as unknown as AsyncGenerator<string>) {
yield token;
}
},
40 changes: 40 additions & 0 deletions js/src/tests/wrapped_openai.int.test.ts
@@ -6,6 +6,8 @@ import { wrapOpenAI } from "../wrappers/index.js";
import { Client } from "../client.js";
import { mockClient } from "./utils/mock_client.js";
import { getAssumedTreeFromCalls } from "./utils/tree.js";
import { zodResponseFormat } from "openai/helpers/zod";
import { z } from "zod";

test("wrapOpenAI should return type compatible with OpenAI", async () => {
let originalClient = new OpenAI();
@@ -534,3 +536,41 @@ test("chat.concurrent extra name", async () => {
},
});
});

test.concurrent("beta.chat.completions.parse", async () => {
const { client, callSpy } = mockClient();

const openai = wrapOpenAI(new OpenAI(), {
client,
});

await openai.beta.chat.completions.parse({
model: "gpt-4o-mini",
temperature: 0,
messages: [
{
role: "user",
content: "I am Jacob",
},
],
response_format: zodResponseFormat(
z.object({
name: z.string(),
}),
"name"
),
});

for (const call of callSpy.mock.calls) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
expect(["POST", "PATCH"]).toContain((call[2] as any)["method"]);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
expect(JSON.parse((call[2] as any).body).extra.metadata).toEqual({
ls_model_name: "gpt-4o-mini",
ls_model_type: "chat",
ls_provider: "openai",
ls_temperature: 0,
});
}
callSpy.mockClear();
});
130 changes: 99 additions & 31 deletions js/src/wrappers/openai.ts
@@ -5,6 +5,14 @@ import { isTraceableFunction, traceable } from "../traceable.js";

// Extra leniency around types in case multiple OpenAI SDK versions get installed
type OpenAIType = {
beta?: {
chat?: {
completions?: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
parse?: (...args: any[]) => any;
};
};
};
chat: {
completions: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -216,38 +224,98 @@ export const wrapOpenAI = <T extends OpenAIType>(
);
}

openai.chat.completions.create = traceable(
openai.chat.completions.create.bind(openai.chat.completions),
{
name: "ChatOpenAI",
run_type: "llm",
aggregator: chatAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload: unknown) => {
if (typeof payload !== "object" || payload == null) return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload as OpenAI.ChatCompletionCreateParams;
// Some internal OpenAI methods call each other, so we need to preserve original
// OpenAI methods.
const tracedOpenAIClient = { ...openai };

const ls_stop =
(typeof params.stop === "string" ? [params.stop] : params.stop) ??
undefined;
if (
openai.beta &&
openai.beta.chat &&
openai.beta.chat.completions &&
typeof openai.beta.chat.completions.parse === "function"
) {
tracedOpenAIClient.beta = {
...openai.beta,
chat: {
...openai.beta.chat,
completions: {
...openai.beta.chat.completions,
parse: traceable(
openai.beta.chat.completions.parse.bind(
openai.beta.chat.completions
),
{
name: "ChatOpenAI",
run_type: "llm",
aggregator: chatAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload: unknown) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload as OpenAI.ChatCompletionCreateParams;

return {
ls_provider: "openai",
ls_model_type: "chat",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
const ls_stop =
(typeof params.stop === "string"
? [params.stop]
: params.stop) ?? undefined;

return {
ls_provider: "openai",
ls_model_type: "chat",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
},
...options,
}
),
},
},
...options,
}
);
};
}

tracedOpenAIClient.chat = {
...openai.chat,
completions: {
...openai.chat.completions,
create: traceable(
openai.chat.completions.create.bind(openai.chat.completions),
{
name: "ChatOpenAI",
run_type: "llm",
aggregator: chatAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload: unknown) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload as OpenAI.ChatCompletionCreateParams;

const ls_stop =
(typeof params.stop === "string" ? [params.stop] : params.stop) ??
undefined;

openai.completions.create = traceable(
openai.completions.create.bind(openai.completions),
{
return {
ls_provider: "openai",
ls_model_type: "chat",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
},
...options,
}
),
},
};

tracedOpenAIClient.completions = {
...openai.completions,
create: traceable(openai.completions.create.bind(openai.completions), {
name: "OpenAI",
run_type: "llm",
aggregator: textAggregator,
@@ -271,8 +339,8 @@ export const wrapOpenAI = <T extends OpenAIType>(
};
},
...options,
}
);
}),
};

return openai as PatchedOpenAIClient<T>;
return tracedOpenAIClient as PatchedOpenAIClient<T>;
};
26 changes: 10 additions & 16 deletions js/yarn.lock
@@ -3952,34 +3952,33 @@ onetime@^5.1.2:
dependencies:
mimic-fn "^2.1.0"

openai@^4.38.5:
version "4.52.7"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.7.tgz#e32b000142287a9e8eda8512ba28df33d11ec1f1"
integrity sha512-dgxA6UZHary6NXUHEDj5TWt8ogv0+ibH+b4pT5RrWMjiRZVylNwLcw/2ubDrX5n0oUmHX/ZgudMJeemxzOvz7A==
openai@^4.57.3:
version "4.61.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.61.1.tgz#1fe2fa231b6de54fad32785528d7628dbbf68ab4"
integrity sha512-jZ2WRn+f4QWZkYnrUS+xzEUIBllsGN75dUCaXmMIHcv2W9yn7O8amaReTbGHCNEYkL43vuDOcxPUWfNPUmoD3Q==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
"@types/qs" "^6.9.15"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
web-streams-polyfill "^3.2.1"
qs "^6.10.3"

openai@^4.57.3:
version "4.61.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.61.1.tgz#1fe2fa231b6de54fad32785528d7628dbbf68ab4"
integrity sha512-jZ2WRn+f4QWZkYnrUS+xzEUIBllsGN75dUCaXmMIHcv2W9yn7O8amaReTbGHCNEYkL43vuDOcxPUWfNPUmoD3Q==
openai@^4.67.3:
version "4.67.3"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.67.3.tgz#a7c1b59cb9eca064f3ff3d439b7bf51487d88a49"
integrity sha512-HT2tZgjLgRqbLQNKmYtjdF/4TQuiBvg1oGvTDhwpSEQzxo6/oM1us8VQ53vBK2BiKvCxFuq6gKGG70qfwrNhKg==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
"@types/qs" "^6.9.15"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
qs "^6.10.3"

openapi-types@^12.1.3:
version "12.1.3"
@@ -4748,11 +4747,6 @@ web-streams-polyfill@4.0.0-beta.3:
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38"
integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==

web-streams-polyfill@^3.2.1:
version "3.3.2"
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz#32e26522e05128203a7de59519be3c648004343b"
integrity sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==

webidl-conversions@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
