use partial json and new json delta block
roodboi committed Sep 26, 2024
1 parent 409fb8d commit bd383aa
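A note on the commit title: when Anthropic streams a tool-use block, each content_block_delta event carries an input_json_delta whose partial_json field holds a fragment of the tool-call arguments, and concatenating those fragments reproduces the full JSON; plain text arrives as a text_delta instead. A rough sketch of the two delta shapes the new code distinguishes (only the fields referenced in the diffs below are shown; the alias name is a local stand-in, not an SDK type):

// Only the fields used in the diffs below are modeled here.
type ContentBlockDelta =
  | { type: "text_delta"; text: string } // plain streamed text
  | { type: "input_json_delta"; partial_json: string } // streamed fragment of tool-call JSON arguments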
Showing 3 changed files with 73 additions and 94 deletions.
Binary file modified bun.lockb (binary file not shown)
119 changes: 60 additions & 59 deletions public-packages/llm-client/src/providers/anthropic/index.ts
@@ -138,21 +138,9 @@ export class AnthropicProvider extends Anthropic implements OpenAILikeClient<"an
} as OpenAI.ChatCompletion.Choice
]
}
} else {
return {
...transformedResponse,
object: "chat.completion.chunk",
choices: [
{
delta: {
content: ""
},
finish_reason: null,
index: 0
}
]
}
}

return transformedResponse
}

/**
@@ -233,71 +221,84 @@ export class AnthropicProvider extends Anthropic implements OpenAILikeClient<"an
): AsyncIterable<ExtendedCompletionChunkAnthropic> {
let finalChatCompletion: ExtendedCompletionChunkAnthropic | null = null

for await (const data of response) {
switch (data.type) {
for await (const event of response) {
switch (event.type) {
case "message_start":
this.log("debug", "Message start:", data)
finalChatCompletion = (await this.transformResponse(data.message, {
stream: true
})) as ExtendedCompletionChunkAnthropic
this.log("debug", "Message start:", event)
finalChatCompletion = {
id: event.message.id,
object: "chat.completion.chunk",
created: Date.now(),
model: event.message.model,
choices: [
{
index: 0,
delta: { role: "assistant" },
finish_reason: null
}
],
usage: {
prompt_tokens: event.message.usage.input_tokens,
completion_tokens: 0,
total_tokens: event.message.usage.input_tokens
},
originResponse: event.message
}

yield finalChatCompletion
continue
break

case "content_block_start":
this.log("debug", "Content block start:", event)
break

case "content_block_delta":
if (data.delta && data.delta.type === "text_delta" && data.delta?.text) {
if (finalChatCompletion && finalChatCompletion.choices) {
if (data.delta.text) {
finalChatCompletion.choices[0].delta = {
content: data.delta.text,
role: "assistant"
}
if (finalChatCompletion && finalChatCompletion.choices) {
if (event.delta.type === "text_delta") {
finalChatCompletion.choices[0].delta = {
content: event.delta.text,
role: "assistant"
}
}

yield finalChatCompletion as ExtendedCompletionChunkAnthropic
}

if (data.delta && data.delta.type === "input_json_delta") {
if (finalChatCompletion && finalChatCompletion.choices) {
if (data.delta.partial_json) {
finalChatCompletion.choices[0].delta = {
content:
(finalChatCompletion.choices?.[0]?.delta?.content ?? "") +
data.delta.partial_json,
role: "assistant"
}
} else if (event.delta.type === "input_json_delta") {
finalChatCompletion.choices[0].delta = {
content: event.delta.partial_json,
role: "assistant"
}
}
}

continue

case "content_block_stop":
this.log("debug", "Content block stop:", data)

if (finalChatCompletion && finalChatCompletion.choices) {
finalChatCompletion.choices[0].finish_reason = "stop"
yield finalChatCompletion
}
break

yield finalChatCompletion as ExtendedCompletionChunkAnthropic
continue
case "content_block_stop":
this.log("debug", "Content block stop:", event)
break

case "message_delta":
if (finalChatCompletion && finalChatCompletion?.usage) {
finalChatCompletion.usage.completion_tokens = data?.usage?.output_tokens
if (finalChatCompletion && finalChatCompletion.usage) {
finalChatCompletion.usage.completion_tokens = event.usage?.output_tokens || 0
finalChatCompletion.usage.total_tokens =
finalChatCompletion.usage.prompt_tokens + data?.usage?.output_tokens
finalChatCompletion.usage.prompt_tokens + finalChatCompletion.usage.completion_tokens
}

continue
break

case "message_stop":
this.log("debug", "Message stop:", data)
this.log("debug", "Message stop:", event)
if (finalChatCompletion && finalChatCompletion.choices) {
finalChatCompletion.choices[0].finish_reason = "stop"
finalChatCompletion.choices[0].delta = {
content: null,
role: "assistant"
}
yield finalChatCompletion
}
break

default:
this.log("warn", "Unknown event type:", event)
}
}
}

/**
* Creates a chat completion using the Anthropic API.
* @param params - The chat completion parameters.
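Because the removed and added lines are interleaved above, here is a condensed sketch of the control flow the new transformStreamResponse appears to implement, reconstructed from the added lines only: seed a chunk on message_start, surface text_delta text and input_json_delta partial JSON as delta.content, update token usage on message_delta, and mark finish_reason "stop" on message_stop. The type names below are local stand-ins for the package's ExtendedCompletionChunkAnthropic and the SDK's stream-event types, and logging plus the originResponse field are omitted; treat this as a sketch, not the implementation.

// Local stand-ins: only the fields the diff references are modeled.
type StreamEvent =
  | { type: "message_start"; message: { id: string; model: string; usage: { input_tokens: number } } }
  | { type: "content_block_start" }
  | {
      type: "content_block_delta"
      delta: { type: "text_delta"; text: string } | { type: "input_json_delta"; partial_json: string }
    }
  | { type: "content_block_stop" }
  | { type: "message_delta"; usage?: { output_tokens: number } }
  | { type: "message_stop" }

interface ChunkSketch {
  id: string
  object: "chat.completion.chunk"
  created: number
  model: string
  choices: {
    index: number
    delta: { role?: "assistant"; content?: string | null }
    finish_reason: string | null
  }[]
  usage: { prompt_tokens: number; completion_tokens: number; total_tokens: number }
}

async function* transformStreamSketch(events: AsyncIterable<StreamEvent>): AsyncIterable<ChunkSketch> {
  let chunk: ChunkSketch | null = null

  for await (const event of events) {
    switch (event.type) {
      case "message_start":
        // Seed the chunk with the message id, model and prompt-token usage.
        chunk = {
          id: event.message.id,
          object: "chat.completion.chunk",
          created: Date.now(),
          model: event.message.model,
          choices: [{ index: 0, delta: { role: "assistant" }, finish_reason: null }],
          usage: {
            prompt_tokens: event.message.usage.input_tokens,
            completion_tokens: 0,
            total_tokens: event.message.usage.input_tokens
          }
        }
        yield chunk
        break

      case "content_block_delta":
        if (!chunk) break
        // Text deltas and partial-JSON tool-argument deltas are both surfaced
        // as plain delta.content, one fragment per yielded chunk.
        chunk.choices[0].delta = {
          role: "assistant",
          content: event.delta.type === "text_delta" ? event.delta.text : event.delta.partial_json
        }
        yield chunk
        break

      case "message_delta":
        if (!chunk) break
        chunk.usage.completion_tokens = event.usage?.output_tokens ?? 0
        chunk.usage.total_tokens = chunk.usage.prompt_tokens + chunk.usage.completion_tokens
        break

      case "message_stop":
        if (!chunk) break
        chunk.choices[0].finish_reason = "stop"
        chunk.choices[0].delta = { role: "assistant", content: null }
        yield chunk
        break

      default:
        // content_block_start / content_block_stop are only logged in the real method.
        break
    }
  }
}

Per the added lines above, the actual method also logs each event, keeps the raw Anthropic message on originResponse, and warns on unknown event types.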
48 changes: 13 additions & 35 deletions public-packages/llm-client/tests/anthropic.test.ts
@@ -3,13 +3,12 @@ import { omit } from "@/lib"
import Anthropic from "@anthropic-ai/sdk"
import { describe, expect, test } from "bun:test"

for await (const model of ["claude-3-opus-20240229", "claude-3-sonnet-20240229"] as const) {
for await (const model of ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229"] as const) {
await createTestCase(model)
}

const anthropicClient = createLLMClient({
provider: "anthropic",
logLevel: "error"
provider: "anthropic"
})

async function createTestCase(model: Anthropic.CompletionCreateParams["model"]) {
@@ -105,12 +104,13 @@ async function createTestCase(model: Anthropic.CompletionCreateParams["model"])
final += message.choices?.[0].delta?.content ?? ""
}

console.log(final)
expect(final).toBe(`{\"name\": \"Dimitri Kennedy\"}`)
})

test("Function Calling complex schema", async () => {
test("Function Calling STREAM - complex schema", async () => {
const completion = await anthropicClient.chat.completions.create({
model,
stream: true,
max_tokens: 1000,
messages: [
{
@@ -209,36 +209,14 @@ async function createTestCase(model: Anthropic.CompletionCreateParams["model"])
]
})

// @ts-expect-error fails because it's optionally undefined, which is fine - if we fail we fail
expect(omit(["id"], completion?.choices?.[0]?.message.tool_calls?.[0])).toEqual({
type: "function",
function: {
name: "process_user_data",
arguments: JSON.stringify({
userDetails: {
firstName: "John",
lastName: "Doe",
contactDetails: {
email: "[email protected]",
phoneNumber: "555-1234"
}
},
jobHistory: [
{
companyName: "Acme Corp",
role: "Software Engineer",
years: 5
},
{
companyName: "Globex Inc.",
role: "Lead Developer",
years: 3
}
],
skills: ["Programming", "Leadership", "Communication"]
})
}
})
let final = ""
for await (const message of completion) {
final += message.choices?.[0].delta?.content ?? ""
}

expect(final).toBe(
`{\"userDetails\": {\"firstName\": \"John\", \"lastName\": \"Doe\", \"contactDetails\": {\"email\": \"[email protected]\", \"phoneNumber\": \"555-1234\"}}, \"jobHistory\": [{\"companyName\": \"Acme Corp\", \"role\": \"Software Engineer\", \"years\": 5}, {\"companyName\": \"Globex Inc.\", \"role\": \"Lead Developer\", \"years\": 3}], \"skills\": [\"Programming\", \"Leadership\", \"Communication\"]}`
)
})

test("Standard stream", async () => {
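Downstream, the updated streaming tests simply concatenate each chunk's delta.content and compare the accumulated string to the expected JSON. A minimal consumer in that style (the chunk interface is a local stand-in for what the client yields, and the final JSON.parse is an extra illustrative step the tests themselves do not perform):

// Accumulate streamed fragments into a single string, then parse it.
interface StreamChunkLike {
  choices?: { delta?: { content?: string | null } }[]
}

async function collectStreamedJson(completion: AsyncIterable<StreamChunkLike>): Promise<unknown> {
  let final = ""
  for await (const chunk of completion) {
    final += chunk.choices?.[0]?.delta?.content ?? ""
  }
  // For the basic-schema test above this accumulates `{"name": "Dimitri Kennedy"}`.
  return JSON.parse(final)
}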
