From 5fe36fbd5b7047428204427fe6849d49de6ee952 Mon Sep 17 00:00:00 2001
From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com>
Date: Thu, 7 Nov 2024 09:20:21 -0800
Subject: [PATCH] fix: Remove underscore token usage. Improve documentation.
 (#667)

---
 packages/sdk/server-ai/src/api/LDAIClient.ts  | 47 +++++++++++--------
 .../src/api/metrics/UnderScoreTokenUsage.ts   |  9 ----
 .../sdk/server-ai/src/api/metrics/index.ts    |  1 -
 3 files changed, 27 insertions(+), 30 deletions(-)
 delete mode 100644 packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts

diff --git a/packages/sdk/server-ai/src/api/LDAIClient.ts b/packages/sdk/server-ai/src/api/LDAIClient.ts
index ceaf936af..990485819 100644
--- a/packages/sdk/server-ai/src/api/LDAIClient.ts
+++ b/packages/sdk/server-ai/src/api/LDAIClient.ts
@@ -10,44 +10,50 @@ export interface LDAIClient {
   /**
    * Parses and interpolates a template string with the provided variables.
    *
-   * @param template - The template string to be parsed and interpolated.
-   * @param variables - An object containing the variables to be used for interpolation.
+   * @param template The template string to be parsed and interpolated.
+   * @param variables An object containing the variables to be used for interpolation.
    * @returns The interpolated string.
    */
   interpolateTemplate(template: string, variables: Record<string, unknown>): string;
 
   /**
-   * Retrieves and processes a prompt template based on the provided key, LaunchDarkly context, and
-   * variables.
+   * Retrieves and processes an AI configuration based on the provided key, LaunchDarkly context,
+   * and variables. This includes the model configuration and the processed prompts.
    *
-   * @param key - A unique identifier for the prompt template. This key is used to fetch the correct
-   * prompt from storage or configuration.
-   * @param context - The LaunchDarkly context object that contains relevant information about the
-   * current environment, user, or session. This context may influence how the prompt is processed
-   * or personalized.
-   * @param variables - A map of key-value pairs representing dynamic variables to be injected into
+   * @param key The key of the AI configuration.
+   * @param context The LaunchDarkly context object that contains relevant information about the
+   * current environment, user, or session. This context may influence how the configuration is
+   * processed or personalized.
+   * @param variables A map of key-value pairs representing dynamic variables to be injected into
    * the prompt template. The keys correspond to placeholders within the template, and the values
    * are the corresponding replacements.
-   * @param defaultValue - A fallback value to be used if the prompt template associated with the
-   * key is not found or if any errors occur during processing.
+   * @param defaultValue A fallback value containing model configuration and prompts. This will
+   * be used if the configuration is not available from LaunchDarkly.
    *
-   * @returns The processed prompt after all variables have been substituted in the stored prompt
-   * template. If the prompt cannot be retrieved or processed, the `defaultValue` is returned.
+   * @returns The AI configuration including a processed prompt after all variables have been
+   * substituted in the stored prompt template. This will also include a `tracker` used to track
+   * the state of the AI operation. If the configuration cannot be accessed from LaunchDarkly, then
+   * the return value will include information from the defaultValue.
    *
    * @example
    * ```
    * const key = "welcome_prompt";
    * const context = {...};
    * const variables = {username: 'john'};
-   * const defaultValue = {};
+   * const defaultValue = {
+   *   enabled: false,
+   * };
    *
    * const result = modelConfig(key, context, defaultValue, variables);
    * // Output:
    * {
-   *   modelId: "gpt-4o",
-   *   temperature: 0.2,
-   *   maxTokens: 4096,
-   *   userDefinedKey: "myValue",
+   *   enabled: true,
+   *   config: {
+   *     modelId: "gpt-4o",
+   *     temperature: 0.2,
+   *     maxTokens: 4096,
+   *     userDefinedKey: "myValue",
+   *   },
    *   prompt: [
    *     {
    *       role: "system",
@@ -57,7 +63,8 @@ export interface LDAIClient {
    *       role: "user",
    *       content: "Explain how you're an amazing GPT."
    *     }
-   *   ]
+   *   ],
+   *   tracker: ...
    * }
    * ```
    */
diff --git a/packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts b/packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts
deleted file mode 100644
index 201d95aa9..000000000
--- a/packages/sdk/server-ai/src/api/metrics/UnderScoreTokenUsage.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import { LDTokenUsage } from './LDTokenUsage';
-
-export function createUnderscoreTokenUsage(data: any): LDTokenUsage {
-  return {
-    total: data.total_tokens || 0,
-    input: data.prompt_tokens || 0,
-    output: data.completion_tokens || 0,
-  };
-}
diff --git a/packages/sdk/server-ai/src/api/metrics/index.ts b/packages/sdk/server-ai/src/api/metrics/index.ts
index 4bebfd116..9f5e199f5 100644
--- a/packages/sdk/server-ai/src/api/metrics/index.ts
+++ b/packages/sdk/server-ai/src/api/metrics/index.ts
@@ -1,4 +1,3 @@
 export * from './BedrockTokenUsage';
 export * from './LDFeedbackKind';
 export * from './LDTokenUsage';
-export * from './UnderScoreTokenUsage';
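
Note: the updated JSDoc above documents a new return shape for `modelConfig` (`enabled`, `config`, `prompt`, and `tracker`). A minimal consumption sketch follows; the interfaces are inferred from the JSDoc example for illustration only and are not the SDK's exported types:

```typescript
// Illustrative types inferred from the JSDoc example above; the SDK
// exports its own definitions, which may differ.
interface AIModelConfig {
  modelId: string;
  temperature?: number;
  maxTokens?: number;
  [userDefinedKey: string]: unknown;
}

interface AIConfigResult {
  enabled: boolean;
  config?: AIModelConfig;
  prompt?: { role: string; content: string }[];
  tracker?: unknown; // used to track the state of the AI operation
}

// Branch on `enabled` before doing any model work; this is why the
// example's defaultValue is `{ enabled: false }`.
function runIfEnabled(result: AIConfigResult): void {
  if (!result.enabled) {
    return; // configuration disabled or unavailable from LaunchDarkly
  }
  // Hand result.config and result.prompt to the model provider of
  // choice, then report outcomes through result.tracker.
}
```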
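
With `createUnderscoreTokenUsage` removed, callers that receive snake_case usage fields (e.g. from OpenAI-style responses) now do the mapping themselves. Here is a sketch of that mapping, reusing the field names and the `LDTokenUsage` shape visible in the deleted file; the inline interface is a stand-in for the SDK's exported `LDTokenUsage`:

```typescript
// Stand-in for the SDK's exported LDTokenUsage; the shape (total /
// input / output) is taken from the deleted helper above.
interface LDTokenUsage {
  total: number;
  input: number;
  output: number;
}

// Maps the snake_case fields the deleted helper read into LDTokenUsage,
// with an explicit parameter type in place of the old `any`.
function toTokenUsage(data: {
  total_tokens?: number;
  prompt_tokens?: number;
  completion_tokens?: number;
}): LDTokenUsage {
  return {
    total: data.total_tokens ?? 0,
    input: data.prompt_tokens ?? 0,
    output: data.completion_tokens ?? 0,
  };
}
```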