feat: managed setup for togetherai model
Henry Fontanier committed Dec 11, 2024
1 parent c678d5f commit 17c8086
Showing 6 changed files with 97 additions and 0 deletions.
10 changes: 10 additions & 0 deletions front/components/providers/types.ts
@@ -3,6 +3,7 @@ import {
GoogleLogo,
MistralLogo,
OpenaiLogo,
PlanetIcon,
} from "@dust-tt/sparkle";
import type { ModelConfig, SUPPORTED_MODEL_CONFIGS } from "@dust-tt/types";
import {
@@ -16,6 +17,10 @@ import {
MISTRAL_CODESTRAL_MODEL_CONFIG,
MISTRAL_LARGE_MODEL_CONFIG,
MISTRAL_SMALL_MODEL_CONFIG,
TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_CONFIG,
TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_CONFIG,
TOGETHERAI_QWEN_32B_PREVIEW_MODEL_CONFIG,
TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_CONFIG,
} from "@dust-tt/types";
import type { ComponentType } from "react";

@@ -26,6 +31,7 @@ export const MODEL_PROVIDER_LOGOS: Record<ModelProvider, ComponentType> = {
anthropic: AnthropicLogo,
mistral: MistralLogo,
google_ai_studio: GoogleLogo,
togetherai: PlanetIcon,
};

export const USED_MODEL_CONFIGS: readonly ModelConfig[] = [
@@ -39,4 +45,8 @@ export const USED_MODEL_CONFIGS: readonly ModelConfig[] = [
MISTRAL_CODESTRAL_MODEL_CONFIG,
GEMINI_PRO_DEFAULT_MODEL_CONFIG,
GEMINI_FLASH_DEFAULT_MODEL_CONFIG,
TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_CONFIG,
TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_CONFIG,
TOGETHERAI_QWEN_32B_PREVIEW_MODEL_CONFIG,
TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_CONFIG,
] as const;
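
This hunk registers TogetherAI in the provider logo map and adds its four models to the list of used configs. A minimal consumption sketch, assuming a front-end import path like "@app/components/providers/types" (the alias is not shown in this diff):

```typescript
// Sketch only: the "@app/..." import alias is an assumption about the repo layout.
import {
  MODEL_PROVIDER_LOGOS,
  USED_MODEL_CONFIGS,
} from "@app/components/providers/types";

// All TogetherAI-hosted models now exposed through the provider UI.
const togetherAIModels = USED_MODEL_CONFIGS.filter(
  (config) => config.providerId === "togetherai"
);
console.log(togetherAIModels.map((config) => config.displayName));
// -> ["Llama 3.3 70B Instruct Turbo", "Qwen 2.5 Coder 32B Instruct", ...]

// The logo component registered for the provider in this commit (PlanetIcon).
const TogetherAILogo = MODEL_PROVIDER_LOGOS["togetherai"];
```
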
1 change: 1 addition & 0 deletions front/components/workspace/ProviderManagementModal.tsx
@@ -32,6 +32,7 @@ const prettyfiedProviderNames: { [key in ModelProviderIdType]: string } = {
anthropic: "Anthropic",
mistral: "Mistral AI",
google_ai_studio: "Google",
togetherai: "TogetherAI",
};

const modelProviders: Record<ModelProviderIdType, string[]> =
6 changes: 6 additions & 0 deletions sdks/js/src/types.ts
@@ -9,6 +9,7 @@ const ModelProviderIdSchema = FlexibleEnumSchema([
"anthropic",
"mistral",
"google_ai_studio",
"togetherai",
]);

const ModelLLMIdSchema = FlexibleEnumSchema([
@@ -32,6 +33,10 @@ const ModelLLMIdSchema = FlexibleEnumSchema([
"codestral-latest",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
"meta-llama/Llama-3.3-70B-Instruct-Turbo",
"Qwen/Qwen2.5-Coder-32B-Instruct",
"Qwen/QwQ-32B-Preview",
"Qwen/Qwen2-72B-Instruct",
]);

const EmbeddingProviderIdSchema = FlexibleEnumSchema(["openai", "mistral"]);
@@ -659,6 +664,7 @@ const WhitelistableFeaturesSchema = FlexibleEnumSchema([
"zendesk_connector_feature",
"index_private_slack_channel",
"conversations_jit_actions",
"togetherai_models",
]);

export type WhitelistableFeature = z.infer<typeof WhitelistableFeaturesSchema>;
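
The SDK additions above extend the provider and model id enums and add the "togetherai_models" feature flag. FlexibleEnumSchema is an internal helper whose implementation is not part of this diff; purely as an illustration, a strict zod enum over the four new model ids shows the kind of validation involved (the real schema may be more permissive):

```typescript
import { z } from "zod";

// Illustrative stand-in only: assumes strict enum behaviour, whereas the SDK's
// FlexibleEnumSchema may also accept values outside the listed set.
const TogetherAIModelIdSchema = z.enum([
  "meta-llama/Llama-3.3-70B-Instruct-Turbo",
  "Qwen/Qwen2.5-Coder-32B-Instruct",
  "Qwen/QwQ-32B-Preview",
  "Qwen/Qwen2-72B-Instruct",
]);

console.log(TogetherAIModelIdSchema.safeParse("Qwen/QwQ-32B-Preview").success); // true
console.log(TogetherAIModelIdSchema.safeParse("Qwen/unknown-model").success); // false
```
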
2 changes: 2 additions & 0 deletions types/src/front/lib/api/credentials.ts
@@ -10,6 +10,7 @@ const {
DUST_MANAGED_GOOGLE_AI_STUDIO_API_KEY = "",
DUST_MANAGED_SERP_API_KEY = "",
DUST_MANAGED_BROWSERLESS_API_KEY = "",
DUST_MANAGED_TOGETHERAI_API_KEY = "",
} = process.env;

export const credentialsFromProviders = (
@@ -77,5 +78,6 @@ export const dustManagedCredentials = (): CredentialsType => {
GOOGLE_AI_STUDIO_API_KEY: DUST_MANAGED_GOOGLE_AI_STUDIO_API_KEY,
SERP_API_KEY: DUST_MANAGED_SERP_API_KEY,
BROWSERLESS_API_KEY: DUST_MANAGED_BROWSERLESS_API_KEY,
TOGETHERAI_API_KEY: DUST_MANAGED_TOGETHERAI_API_KEY,
};
};
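
With the new DUST_MANAGED_TOGETHERAI_API_KEY variable wired in, dustManagedCredentials() now carries a TOGETHERAI_API_KEY alongside the other managed provider keys. A usage sketch (the "@dust-tt/types" export path is assumed):

```typescript
import { dustManagedCredentials } from "@dust-tt/types"; // export path assumed

const credentials = dustManagedCredentials();

// DUST_MANAGED_TOGETHERAI_API_KEY defaults to "" when the env var is unset,
// so an empty string means the managed provider is not configured.
if (!credentials.TOGETHERAI_API_KEY) {
  console.warn("Managed TogetherAI credentials are not configured.");
}
```
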
77 changes: 77 additions & 0 deletions types/src/front/lib/assistant.ts
@@ -13,6 +13,7 @@ export const MODEL_PROVIDER_IDS = [
"anthropic",
"mistral",
"google_ai_studio",
"togetherai",
] as const;
export type ModelProviderIdType = (typeof MODEL_PROVIDER_IDS)[number];

@@ -108,6 +109,14 @@ export const MISTRAL_CODESTRAL_MODEL_ID = "codestral-latest" as const;
export const GEMINI_1_5_PRO_LATEST_MODEL_ID = "gemini-1.5-pro-latest" as const;
export const GEMINI_1_5_FLASH_LATEST_MODEL_ID =
"gemini-1.5-flash-latest" as const;
export const TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_ID =
"meta-llama/Llama-3.3-70B-Instruct-Turbo" as const;
export const TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_ID =
"Qwen/Qwen2.5-Coder-32B-Instruct" as const;
export const TOGETHERAI_QWEN_32B_PREVIEW_MODEL_ID =
"Qwen/QwQ-32B-Preview" as const;
export const TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_ID =
"Qwen/Qwen2-72B-Instruct" as const;

export const MODEL_IDS = [
GPT_3_5_TURBO_MODEL_ID,
@@ -130,6 +139,10 @@ export const MODEL_IDS = [
MISTRAL_CODESTRAL_MODEL_ID,
GEMINI_1_5_PRO_LATEST_MODEL_ID,
GEMINI_1_5_FLASH_LATEST_MODEL_ID,
TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_ID,
TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_ID,
TOGETHERAI_QWEN_32B_PREVIEW_MODEL_ID,
TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_ID,
] as const;
export type ModelIdType = (typeof MODEL_IDS)[number];

@@ -522,6 +535,66 @@ export const GEMINI_FLASH_DEFAULT_MODEL_CONFIG: ModelConfigurationType = {
supportsVision: false,
};

export const TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_CONFIG: ModelConfigurationType =
{
providerId: "togetherai",
modelId: TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_ID,
displayName: "Llama 3.3 70B Instruct Turbo",
contextSize: 128_000,
recommendedTopK: 32,
recommendedExhaustiveTopK: 128, // 65_536
largeModel: true,
description: "Meta's fast, powerful and open source model (128k context).",
shortDescription: "Meta's open source model.",
isLegacy: false,
supportsVision: false,
};

export const TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_CONFIG: ModelConfigurationType =
{
providerId: "togetherai",
modelId: TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_ID,
displayName: "Qwen 2.5 Coder 32B Instruct",
contextSize: 32_000,
recommendedTopK: 16,
recommendedExhaustiveTopK: 56, // 28_672
largeModel: false,
description: "Alibaba's fast model for coding (32k context).",
shortDescription: "Alibaba's fast coding model.",
isLegacy: false,
supportsVision: false,
};

export const TOGETHERAI_QWEN_32B_PREVIEW_MODEL_CONFIG: ModelConfigurationType =
{
providerId: "togetherai",
modelId: TOGETHERAI_QWEN_32B_PREVIEW_MODEL_ID,
displayName: "Qwen 32B Preview",
contextSize: 32_000,
recommendedTopK: 16,
recommendedExhaustiveTopK: 56, // 28_672
largeModel: false,
description: "Alibaba's fast model (32k context).",
shortDescription: "Alibaba's fast model.",
isLegacy: false,
supportsVision: false,
};

export const TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_CONFIG: ModelConfigurationType =
{
providerId: "togetherai",
modelId: TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_ID,
displayName: "Qwen 72B Instruct",
contextSize: 32_000,
recommendedTopK: 16,
recommendedExhaustiveTopK: 56, // 28_672
largeModel: false,
description: "Alibaba's powerful model (32k context).",
shortDescription: "Alibaba's powerful model.",
isLegacy: false,
supportsVision: false,
};

export const SUPPORTED_MODEL_CONFIGS: ModelConfigurationType[] = [
GPT_3_5_TURBO_MODEL_CONFIG,
GPT_4_TURBO_MODEL_CONFIG,
@@ -543,6 +616,10 @@ export const SUPPORTED_MODEL_CONFIGS: ModelConfigurationType[] = [
MISTRAL_CODESTRAL_MODEL_CONFIG,
GEMINI_PRO_DEFAULT_MODEL_CONFIG,
GEMINI_FLASH_DEFAULT_MODEL_CONFIG,
TOGETHERAI_LLAMA_3_3_70B_INSTRUCT_TURBO_MODEL_CONFIG,
TOGETHERAI_QWEN_2_5_CODER_32B_INSTRUCT_MODEL_CONFIG,
TOGETHERAI_QWEN_32B_PREVIEW_MODEL_CONFIG,
TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_CONFIG,
];

export type ModelConfig = (typeof SUPPORTED_MODEL_CONFIGS)[number];
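
The four TogetherAI configurations above are now part of SUPPORTED_MODEL_CONFIGS. A small hypothetical helper (not part of this commit; export paths assumed) that resolves one of them by model id:

```typescript
import {
  SUPPORTED_MODEL_CONFIGS,
  TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_ID,
} from "@dust-tt/types"; // export paths assumed

// Hypothetical lookup helper, shown only to exercise the new configs.
function findModelConfig(modelId: string) {
  return SUPPORTED_MODEL_CONFIGS.find((config) => config.modelId === modelId);
}

const qwen72b = findModelConfig(TOGETHERAI_QWEN_72B_INSTRUCT_MODEL_ID);
console.log(qwen72b?.displayName); // "Qwen 72B Instruct"
console.log(qwen72b?.contextSize); // 32000
console.log(qwen72b?.largeModel); // false
```
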
1 change: 1 addition & 0 deletions types/src/shared/feature_flags.ts
@@ -11,6 +11,7 @@ export const WHITELISTABLE_FEATURES = [
"index_private_slack_channel",
"conversations_jit_actions",
"labs_trackers",
"togetherai_models",
] as const;
export type WhitelistableFeature = (typeof WHITELISTABLE_FEATURES)[number];
export function isWhitelistableFeature(
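
The "togetherai_models" entry mirrors the flag added to the SDK schema, suggesting the new provider is gated per workspace. A hedged sketch of such gating (the filtering helper is hypothetical wiring, not shown in this commit):

```typescript
import type { ModelConfig, WhitelistableFeature } from "@dust-tt/types"; // export path assumed

// Hypothetical gating helper: hide TogetherAI models unless the workspace has
// the "togetherai_models" feature flag enabled.
function filterModelConfigs(
  configs: readonly ModelConfig[],
  enabledFeatures: WhitelistableFeature[]
): ModelConfig[] {
  return configs.filter(
    (config) =>
      config.providerId !== "togetherai" ||
      enabledFeatures.includes("togetherai_models")
  );
}
```
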