add google support for llm-client (aka llm-polyglot) #69

Closed
wants to merge 10 commits
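For context: this PR adds a Google (Gemini) provider behind the same OpenAI-style client surface, plus cache-manager support, polyfills for the streaming text codecs, and the example file below. A minimal usage sketch assembled from that example (the package import name is assumed from the "llm-polyglot" alias in the title; everything else is copied from the diff):

import { createLLMClient } from "llm-polyglot"

// select the Google provider added by this PR
const client = createLLMClient({ provider: "google" })

// OpenAI-style chat call routed to Gemini
const completion = await client.chat.completions.create({
  model: "gemini-1.5-flash-latest",
  messages: [{ role: "user", content: "How much does a soul weigh?" }],
  max_tokens: 1000
})

console.log(JSON.stringify(completion, null, 2))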
Binary file modified bun.lockb
Binary file not shown.
146 changes: 146 additions & 0 deletions public-packages/llm-client/examples/google.function.ts
@@ -0,0 +1,146 @@
import { createLLMClient } from "@/index"

const googleClient = createLLMClient({
  provider: "google"
})

//
// Simple Chat
//

// const completion = await googleClient.chat.completions.create({
//   model: "gemini-1.5-flash-latest",
//   messages: [
//     {
//       role: "user",
//       content: "How much does a soul weigh?"
//     }
//   ],
//   max_tokens: 1000
// })

// console.log(JSON.stringify(completion, null, 2))

//
// Function calling
//

// const completion2 = await googleClient.chat.completions.create({
//   model: "gemini-1.5-flash-latest",
//   max_tokens: 1000,
//   messages: [
//     {
//       role: "user",
//       content: "My name is Spartacus."
//     }
//   ],
//   tool_choice: {
//     type: "function",
//     function: {
//       name: "say_hello"
//     }
//   },
//   tools: [
//     {
//       type: "function",
//       function: {
//         name: "say_hello",
//         description: "Say hello",
//         parameters: {
//           type: "object",
//           properties: {
//             name: {
//               type: "string"
//             }
//           },
//           required: ["name"]
//           // additionalProperties: false
//         }
//       }
//     }
//   ]
// })

// console.log(JSON.stringify(completion2, null, 2))

//
// Streaming chat
//

// const completion3 = await googleClient.chat.completions.create({
//   model: "gemini-1.5-flash-latest",
//   messages: [
//     {
//       role: "user",
//       // content: "Write a soliloquy about the humidity."
//       content: "Write an essay about the chemical composition of dirt."
//     }
//   ],
//   max_tokens: 1000,
//   stream: true
// })

// expect(completion3).toBeTruthy()
// let final = ""
// console.log({ completion3 })
// for await (const message of completion3) {
//   console.log({ message })
//   // final += message.choices?.[0].delta?.content ?? ""
// }

////////////////////////////////////////
// content caching
// note: requires a pay-as-you-go account - not available on the free tier
////////////////////////////////////////

// Generate a very long string
let longContentString = ""
for (let i = 0; i < 32001; i++) {
  longContentString += "Purple cats drink gatorade."
  longContentString += i % 8 === 7 ? "\n" : " "
}

// Add content to cache
const cacheResult = await googleClient.cacheManager.create({
  // const cacheResult = await googleClient.createCacheManager({
  ttlSeconds: 600,
  model: "models/gemini-1.5-pro-001",
  messages: [{ role: "user", content: longContentString }],
  max_tokens: 1000
})

// Get name from cache result
const cacheName = cacheResult?.name ?? ""
console.log("Cache name: ", cacheName)

// List caches
let cacheListResult = await googleClient.cacheManager.list()
console.log("cacheListResult: ", JSON.stringify(cacheListResult, null, 2))

// Delete cache
// await googleClient.cacheManager.delete(cacheName)
// cacheListResult = await googleClient.cacheManager.list()
// console.log("cacheListResult after delete: ", JSON.stringify(cacheListResult, null, 2))

// Delete all caches
// cacheListResult?.cachedContents?.forEach(async cache => {
//   if (cache.name) await googleClient.cacheManager.delete(cache.name)
// })

// Pass name into additionalProperties
const completion4 = await googleClient.chat.completions.create({
  // model: "gemini-1.5-flash-latest",
  model: "models/gemini-1.5-pro-001",
  messages: [
    {
      role: "user",
      content: "What do purple cats drink?"
    }
  ],
  max_tokens: 10000,
  additionalProperties: {
    cacheName
  }
})

console.log("Completion: ", JSON.stringify(completion4, null, 2))
1 change: 1 addition & 0 deletions public-packages/llm-client/package.json
@@ -46,6 +46,7 @@
   "author": "Dimitri Kennedy <[email protected]> (https://hack.dance)",
   "homepage": "https://island.novy.work",
   "dependencies": {
+    "@google/generative-ai": "^0.14.1",
     "json-schema": "^0.4.0"
   },
   "peerDependencies": {
20 changes: 16 additions & 4 deletions public-packages/llm-client/src/index.ts
@@ -1,8 +1,14 @@
 import { AnthropicProvider } from "@/providers/anthropic"
+import { GoogleProvider } from "@/providers/google"
 import { OpenAIProvider } from "@/providers/openai"
 import { OpenAILikeClient, Providers } from "@/types"
+import { TextDecoderStream, TextEncoderStream } from "@/utils/polyfills"
 import { ClientOptions } from "openai"
 
+// polyfills
+globalThis.TextEncoderStream ||= TextEncoderStream
+globalThis.TextDecoderStream ||= TextDecoderStream
+
 export class LLMClient<P extends Providers> {
   private providerInstance: OpenAILikeClient<P>

@@ -11,10 +17,16 @@ export class LLMClient<P extends Providers> {
       provider: P
     }
   ) {
-    if (opts?.provider === "openai") {
-      this.providerInstance = new OpenAIProvider(opts) as OpenAILikeClient<P>
-    } else {
-      this.providerInstance = new AnthropicProvider(opts) as unknown as OpenAILikeClient<P>
+    switch (opts?.provider) {
+      case "anthropic":
+        this.providerInstance = new AnthropicProvider(opts) as unknown as OpenAILikeClient<P>
+        break
+      case "google":
+        this.providerInstance = new GoogleProvider(opts) as unknown as OpenAILikeClient<P>
+        break
+      case "openai":
+      default:
+        this.providerInstance = new OpenAIProvider(opts) as OpenAILikeClient<P>
     }
 
     const proxyHandler: ProxyHandler<OpenAILikeClient<P>> = {
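One design note on the switch above: only an explicit "anthropic" or "google" selects those providers; any other value (or none) falls through to OpenAIProvider, so existing OpenAI users are unaffected, while the old behavior of treating every non-"openai" value as Anthropic is gone. The createLLMClient factory used by the example file is not part of the hunks shown; assuming it is a thin wrapper over the class above, it would look roughly like:

// hypothetical sketch - the actual factory lives outside this diff
export function createLLMClient<P extends Providers>(
  opts?: ClientOptions & { provider: P }
): OpenAILikeClient<P> {
  return new LLMClient<P>(opts) as unknown as OpenAILikeClient<P>
}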