From 6271e9631e87a183e25737ce0cd9595a9f5937dc Mon Sep 17 00:00:00 2001 From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com> Date: Wed, 6 Nov 2024 14:07:27 -0800 Subject: [PATCH 1/2] fix: Remove underscore token usage. Improve documentation. --- packages/sdk/server-ai/src/api/LDAIClient.ts | 43 +++++++++++-------- .../src/api/metrics/UnderScoreTokenUsage.ts | 9 ---- .../sdk/server-ai/src/api/metrics/index.ts | 1 - 3 files changed, 25 insertions(+), 28 deletions(-) delete mode 100644 packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts diff --git a/packages/sdk/server-ai/src/api/LDAIClient.ts b/packages/sdk/server-ai/src/api/LDAIClient.ts index ceaf936af3..dd872d7bf4 100644 --- a/packages/sdk/server-ai/src/api/LDAIClient.ts +++ b/packages/sdk/server-ai/src/api/LDAIClient.ts @@ -17,37 +17,43 @@ export interface LDAIClient { interpolateTemplate(template: string, variables: Record): string; /** - * Retrieves and processes a prompt template based on the provided key, LaunchDarkly context, and - * variables. + * Retrieves and processes an AI configuration based on the provided key, LaunchDarkly context, + * and variables. This includes the model configuration and the processed prompts. * - * @param key - A unique identifier for the prompt template. This key is used to fetch the correct - * prompt from storage or configuration. - * @param context - The LaunchDarkly context object that contains relevant information about the - * current environment, user, or session. This context may influence how the prompt is processed - * or personalized. - * @param variables - A map of key-value pairs representing dynamic variables to be injected into + * @param key The key of the AI configuration. + * @param context The LaunchDarkly context object that contains relevant information about the + * current environment, user, or session. This context may influence how the configuration is + * processed or personalized. 
+ * @param variables A map of key-value pairs representing dynamic variables to be injected into * the prompt template. The keys correspond to placeholders within the template, and the values * are the corresponding replacements. - * @param defaultValue - A fallback value to be used if the prompt template associated with the - * key is not found or if any errors occur during processing. + * @param defaultValue A fallback value containing model configuration and prompts. This will + * be used if the configuration is not available from LaunchDarkly. + * - * @returns The processed prompt after all variables have been substituted in the stored prompt - * template. If the prompt cannot be retrieved or processed, the `defaultValue` is returned. + * @returns The AI configuration including a processed prompt after all variables have been + * substituted in the stored prompt template. This will also include a `tracker` used to track + * the state of the AI operation. If the configuration cannot be accessed from LaunchDarkly, then + * the return value will include information from the defaultValue. * * @example * ``` * const key = "welcome_prompt"; * const context = {...}; * const variables = {username: 'john'}; - * const defaultValue = {}; + * const defaultValue = { + * enabled: false, + * }; * * const result = modelConfig(key, context, defaultValue, variables); * // Output: * { - * modelId: "gpt-4o", - * temperature: 0.2, - * maxTokens: 4096, - * userDefinedKey: "myValue", + * enabled: true, + * config: { + * modelId: "gpt-4o", + * temperature: 0.2, + * maxTokens: 4096, + * userDefinedKey: "myValue", + * }, * prompt: [ * { * role: "system", * @@ -57,7 +63,8 @@ export interface LDAIClient { * role: "user", * content: "Explain how you're an amazing GPT." * } - * ] + * ], + * tracker: ... 
* } * ``` */ diff --git a/packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts b/packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts deleted file mode 100644 index 201d95aa94..0000000000 --- a/packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { LDTokenUsage } from './LDTokenUsage'; - -export function createUnderscoreTokenUsage(data: any): LDTokenUsage { - return { - total: data.total_tokens || 0, - input: data.prompt_tokens || 0, - output: data.completion_tokens || 0, - }; -} diff --git a/packages/sdk/server-ai/src/api/metrics/index.ts b/packages/sdk/server-ai/src/api/metrics/index.ts index 4bebfd1167..9f5e199f59 100644 --- a/packages/sdk/server-ai/src/api/metrics/index.ts +++ b/packages/sdk/server-ai/src/api/metrics/index.ts @@ -1,4 +1,3 @@ export * from './BedrockTokenUsage'; export * from './LDFeedbackKind'; export * from './LDTokenUsage'; -export * from './UnderScoreTokenUsage'; From dd17df45f8a753f7d764f35b4abb6df707fb382e Mon Sep 17 00:00:00 2001 From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com> Date: Wed, 6 Nov 2024 14:08:56 -0800 Subject: [PATCH 2/2] Remove hyphens. --- packages/sdk/server-ai/src/api/LDAIClient.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/sdk/server-ai/src/api/LDAIClient.ts b/packages/sdk/server-ai/src/api/LDAIClient.ts index dd872d7bf4..990485819c 100644 --- a/packages/sdk/server-ai/src/api/LDAIClient.ts +++ b/packages/sdk/server-ai/src/api/LDAIClient.ts @@ -10,8 +10,8 @@ export interface LDAIClient { /** * Parses and interpolates a template string with the provided variables. * - * @param template - The template string to be parsed and interpolated. - * @param variables - An object containing the variables to be used for interpolation. + * @param template The template string to be parsed and interpolated. + * @param variables An object containing the variables to be used for interpolation. 
* @returns The interpolated string. */ interpolateTemplate(template: string, variables: Record): string;