
Commit 1fd4d37

Merge branch 'main' into wfh/add_dotted_order_trace_id_run_tree

hinthornw committed Feb 10, 2024 (2 parents: 50abf5e + c5742d8)
Showing 4 changed files with 104 additions and 4 deletions.
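At a glance: the JS client now probes the backend's /info endpoint once, caches the answer, and falls back to the single-run endpoints when batch ingestion is unsupported; the Python client's _run_transform gains an update flag so patch payloads no longer receive a default start_time; the Python package version moves from 0.0.88 to 0.0.89; and a new test exercises the fallback path.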
35 changes: 35 additions & 0 deletions js/src/client.ts
@@ -242,6 +242,8 @@ export class Client {

private autoBatchTracing = false;

private batchEndpointSupported?: boolean;

private pendingAutoBatchedRuns: AutoBatchQueueItem[] = [];

private pendingAutoBatchedRunLimit = 100;
@@ -537,6 +539,21 @@ export class Client {
}
}

protected async batchEndpointIsSupported() {
const response = await fetch(`${this.apiUrl}/info`, {
method: "GET",
headers: { Accept: "application/json" },
signal: AbortSignal.timeout(this.timeout_ms),
});
if (!response.ok) {
// consume the response body to release the connection
// https://undici.nodejs.org/#/?id=garbage-collection
await response.text();
return false;
}
return true;
}

public async createRun(run: CreateRunParams): Promise<void> {
if (!this._filterForSampling([run]).length) {
return;
@@ -632,6 +649,24 @@
preparedCreateParams = await mergeRuntimeEnvIntoRunCreates(
preparedCreateParams
);
if (this.batchEndpointSupported === undefined) {
this.batchEndpointSupported = await this.batchEndpointIsSupported();
}
if (!this.batchEndpointSupported) {
this.autoBatchTracing = false;
for (const preparedCreateParam of body.post) {
await this.createRun(preparedCreateParam as CreateRunParams);
}
for (const preparedUpdateParam of body.patch) {
if (preparedUpdateParam.id !== undefined) {
await this.updateRun(
preparedUpdateParam.id,
preparedUpdateParam as UpdateRunParams
);
}
}
return;
}
const headers = {
...this.headers,
"Content-Type": "application/json",
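Taken together, the additions above implement a probe-and-fall-back pattern: check /info once, cache the result on the client, and, when the backend predates batch ingestion, disable auto-batching and replay the queued creates and updates through the single-run paths. A self-contained sketch of the same pattern follows; the class name, endpoint paths, and payload shape are illustrative stand-ins, not the commit's actual API.

    // Sketch only: BatchFallbackClient and these endpoints are assumptions for illustration.
    type RunPayload = { id: string; [key: string]: unknown };

    class BatchFallbackClient {
      private batchSupported?: boolean;

      constructor(private apiUrl: string, private timeoutMs = 12_000) {}

      // Probe GET /info once; any non-2xx response means "no batch endpoint".
      private async probeBatchSupport(): Promise<boolean> {
        const response = await fetch(`${this.apiUrl}/info`, {
          method: "GET",
          headers: { Accept: "application/json" },
          signal: AbortSignal.timeout(this.timeoutMs),
        });
        if (!response.ok) {
          await response.text(); // drain the body so the connection is released
          return false;
        }
        return true;
      }

      async ingest(post: RunPayload[], patch: RunPayload[]): Promise<void> {
        // The probe runs at most once per client instance; the answer is cached.
        this.batchSupported ??= await this.probeBatchSupport();
        if (!this.batchSupported) {
          // Older backend: replay the queue through the single-run endpoints.
          for (const run of post) {
            await fetch(`${this.apiUrl}/runs`, {
              method: "POST",
              headers: { "Content-Type": "application/json" },
              body: JSON.stringify(run),
            });
          }
          for (const run of patch) {
            await fetch(`${this.apiUrl}/runs/${run.id}`, {
              method: "PATCH",
              headers: { "Content-Type": "application/json" },
              body: JSON.stringify(run),
            });
          }
          return;
        }
        // Newer backend: one POST carries every queued create and update.
        await fetch(`${this.apiUrl}/runs/batch`, {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ post, patch }),
        });
      }
    }

Caching the probe result means the extra round-trip to /info is paid at most once per client instance rather than once per flush.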
65 changes: 65 additions & 0 deletions js/src/tests/batch_client.test.ts
@@ -15,6 +15,9 @@ describe("Batch client tracing", () => {
ok: true,
text: () => "",
});
jest
.spyOn(client as any, "batchEndpointIsSupported")
.mockResolvedValue(true);
const projectName = "__test_batch";

const runId = uuidv4();
@@ -68,6 +71,9 @@
ok: true,
text: () => "",
});
jest
.spyOn(client as any, "batchEndpointIsSupported")
.mockResolvedValue(true);
const projectName = "__test_batch";

const runId = uuidv4();
@@ -134,6 +140,9 @@
ok: true,
text: () => "",
});
jest
.spyOn(client as any, "batchEndpointIsSupported")
.mockResolvedValue(true);
const projectName = "__test_batch";

const runId = uuidv4();
@@ -235,6 +244,9 @@
ok: true,
text: () => "",
});
jest
.spyOn(client as any, "batchEndpointIsSupported")
.mockResolvedValue(true);
const projectName = "__test_batch";

const runIds = await Promise.all(
@@ -296,4 +308,57 @@
patch: [],
});
});

it("If batching is unsupported, fall back to old endpoint", async () => {
const client = new Client({
apiKey: "test-api-key",
autoBatchTracing: true,
});
const callSpy = jest
.spyOn((client as any).caller, "call")
.mockResolvedValue({
ok: true,
text: () => "",
});
jest
.spyOn(client as any, "batchEndpointIsSupported")
.mockResolvedValue(false);
const projectName = "__test_batch";

const runId = uuidv4();
const dottedOrder = convertToDottedOrderFormat(
new Date().getTime() / 1000,
runId
);
await client.createRun({
id: runId,
project_name: projectName,
name: "test_run",
run_type: "llm",
inputs: { text: "hello world" },
trace_id: runId,
dotted_order: dottedOrder,
});

await new Promise((resolve) => setTimeout(resolve, 300));

const calledRequestParam: any = callSpy.mock.calls[0][2];
expect(JSON.parse(calledRequestParam?.body)).toMatchObject({
id: runId,
session_name: projectName,
extra: expect.anything(),
start_time: expect.any(Number),
name: "test_run",
run_type: "llm",
inputs: { text: "hello world" },
trace_id: runId,
dotted_order: dottedOrder,
});

expect(callSpy).toHaveBeenCalledWith(
fetch,
"https://api.smith.langchain.com/runs",
expect.objectContaining({ body: expect.any(String) })
);
});
});
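A note on the test setup: batchEndpointIsSupported is a protected method, so the specs stub it with jest.spyOn(client as any, "batchEndpointIsSupported"); the `as any` cast sidesteps visibility checking so each test can pin the probe's answer to true or false without touching the network.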
6 changes: 3 additions & 3 deletions python/langsmith/client.py
@@ -840,7 +840,7 @@ def upload_csv(

@staticmethod
def _run_transform(
-    run: Union[ls_schemas.Run, dict, ls_schemas.RunLikeDict],
+    run: Union[ls_schemas.Run, dict, ls_schemas.RunLikeDict], update: bool = False
) -> dict:
"""
Transforms the given run object into a dictionary representation.
@@ -863,7 +863,7 @@ def _run_transform(
run_create["inputs"] = _hide_inputs(run_create["inputs"])
if "outputs" in run_create:
run_create["outputs"] = _hide_outputs(run_create["outputs"])
-    if not run_create.get("start_time"):
+    if not update and not run_create.get("start_time"):
run_create["start_time"] = datetime.datetime.utcnow()
return run_create

@@ -1024,7 +1024,7 @@ def batch_ingest_runs(
return
# transform and convert to dicts
create_dicts = [self._run_transform(run) for run in create or []]
-    update_dicts = [self._run_transform(run) for run in update or []]
+    update_dicts = [self._run_transform(run, update=True) for run in update or []]
# combine post and patch dicts where possible
if update_dicts and create_dicts:
create_by_id = {run["id"]: run for run in create_dicts}
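The new update flag means only run creations get a defaulted timestamp: a patch that carries, say, only outputs and an end_time no longer picks up a spurious start_time of utcnow() that could disagree with the start time recorded when the run was created. Accordingly, batch_ingest_runs passes update=True for its patch payloads only.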
2 changes: 1 addition & 1 deletion python/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langsmith"
version = "0.0.88"
version = "0.0.89"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
authors = ["LangChain <[email protected]>"]
license = "MIT"
