Skip to content

Commit

Permalink
feat: replace o1-preview with o1 (#9596)
Browse files Browse the repository at this point in the history
Co-authored-by: Henry Fontanier <[email protected]>
  • Loading branch information
fontanierh and Henry Fontanier authored Dec 22, 2024
1 parent df938de commit 9cdc903
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 25 deletions.
35 changes: 23 additions & 12 deletions core/src/providers/openai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,7 @@ pub enum OpenAIChatMessageRole {
Assistant,
Function,
System,
Developer,
Tool,
User,
}
Expand Down Expand Up @@ -242,6 +243,7 @@ impl From<OpenAIChatMessageRole> for ChatMessageRole {
OpenAIChatMessageRole::Assistant => ChatMessageRole::Assistant,
OpenAIChatMessageRole::Function => ChatMessageRole::Function,
OpenAIChatMessageRole::System => ChatMessageRole::System,
OpenAIChatMessageRole::Developer => ChatMessageRole::System,
OpenAIChatMessageRole::Tool => ChatMessageRole::Function,
OpenAIChatMessageRole::User => ChatMessageRole::User,
}
Expand Down Expand Up @@ -1719,14 +1721,23 @@ pub fn to_openai_messages(
messages: &Vec<ChatMessage>,
model_id: &str,
) -> Result<Vec<OpenAIChatMessage>, anyhow::Error> {
messages
let mut oai_messages = messages
.iter()
.filter_map(|m| match m {
// [o1-preview] Hack for OpenAI `o1-*` models to exclude system messages.
ChatMessage::System(_) if model_id.starts_with("o1-") => None,
_ => Some(OpenAIChatMessage::try_from(m)),
})
.collect::<Result<Vec<_>>>()
.map(|m| OpenAIChatMessage::try_from(m))
.collect::<Result<Vec<_>>>()?
.into_iter()
// [o1-mini] O1 mini does not support system messages, so we filter them out.
.filter(|m| m.role != OpenAIChatMessageRole::System || !model_id.starts_with("o1-mini"))
.collect::<Vec<_>>();

// [o1] O1 uses `developer` messages instead of `system` messages.
for m in oai_messages.iter_mut() {
if m.role == OpenAIChatMessageRole::System && model_id.starts_with("o1") {
m.role = OpenAIChatMessageRole::Developer;
}
}

Ok(oai_messages)
}

#[async_trait]
Expand Down Expand Up @@ -1795,8 +1806,8 @@ impl LLM for OpenAILLM {
}
}

// [o1-preview] Hack for OpenAI `o1-*` models to not use streaming.
let model_is_o1 = self.id.as_str().starts_with("o1-");
// [o1] Hack for OpenAI `o1*` models to not use streaming.
let model_is_o1 = self.id.as_str().starts_with("o1");
let (c, request_id) = if !model_is_o1 && event_sender.is_some() {
if n > 1 {
return Err(anyhow!(
Expand Down Expand Up @@ -2024,9 +2035,9 @@ impl LLM for OpenAILLM {

let openai_messages = to_openai_messages(messages, &self.id)?;

// [o1-preview] Hack for OpenAI `o1-*` models to simulate streaming.
// [o1] Hack for OpenAI `o1*` models to simulate streaming.
let is_streaming = event_sender.is_some();
let model_is_o1 = self.id.as_str().starts_with("o1-");
let model_is_o1 = self.id.as_str().starts_with("o1");

let (c, request_id) = if !model_is_o1 && is_streaming {
streamed_chat_completion(
Expand Down Expand Up @@ -2089,7 +2100,7 @@ impl LLM for OpenAILLM {
.await?
};

// [o1-preview] Hack for OpenAI `o1-*` models to simulate streaming.
// [o1] Hack for OpenAI `o1*` models to simulate streaming.
if model_is_o1 && is_streaming {
let sender = event_sender.as_ref().unwrap();
for choice in &c.choices {
Expand Down
10 changes: 5 additions & 5 deletions front/lib/api/assistant/global_agents.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import {
MISTRAL_MEDIUM_MODEL_CONFIG,
MISTRAL_SMALL_MODEL_CONFIG,
O1_MINI_MODEL_CONFIG,
O1_PREVIEW_MODEL_CONFIG,
O1_MODEL_CONFIG,
} from "@dust-tt/types";
import assert from "assert";

Expand Down Expand Up @@ -326,16 +326,16 @@ function _getO1PreviewGlobalAgent({
version: 0,
versionCreatedAt: null,
versionAuthorId: null,
name: "o1-preview",
description: O1_PREVIEW_MODEL_CONFIG.description,
name: "o1",
description: O1_MODEL_CONFIG.description,
instructions: null,
pictureUrl: "https://dust.tt/static/systemavatar/o1_avatar_full.png",
status,
scope: "global",
userFavorite: false,
model: {
providerId: O1_PREVIEW_MODEL_CONFIG.providerId,
modelId: O1_PREVIEW_MODEL_CONFIG.modelId,
providerId: O1_MODEL_CONFIG.providerId,
modelId: O1_MODEL_CONFIG.modelId,
temperature: 1, // 1 is forced for O1
},
actions: [],
Expand Down
2 changes: 1 addition & 1 deletion sdks/js/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ const ModelLLMIdSchema = FlexibleEnumSchema<
| "gpt-4o-2024-08-06"
| "gpt-4o"
| "gpt-4o-mini"
| "o1-preview"
| "o1"
| "o1-mini"
| "claude-3-opus-20240229"
| "claude-3-5-sonnet-20240620"
Expand Down
14 changes: 7 additions & 7 deletions types/src/front/lib/assistant.ts
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ export const GPT_4_TURBO_MODEL_ID = "gpt-4-turbo" as const;
export const GPT_4O_MODEL_ID = "gpt-4o" as const;
export const GPT_4O_20240806_MODEL_ID = "gpt-4o-2024-08-06" as const;
export const GPT_4O_MINI_MODEL_ID = "gpt-4o-mini" as const;
export const O1_PREVIEW_MODEL_ID = "o1-preview" as const;
export const O1_MODEL_ID = "o1" as const;
export const O1_MINI_MODEL_ID = "o1-mini" as const;
export const CLAUDE_3_OPUS_2024029_MODEL_ID = "claude-3-opus-20240229" as const;
export const CLAUDE_3_5_SONNET_20240620_MODEL_ID =
Expand Down Expand Up @@ -124,7 +124,7 @@ export const MODEL_IDS = [
GPT_4O_MODEL_ID,
GPT_4O_20240806_MODEL_ID,
GPT_4O_MINI_MODEL_ID,
O1_PREVIEW_MODEL_ID,
O1_MODEL_ID,
O1_MINI_MODEL_ID,
CLAUDE_3_OPUS_2024029_MODEL_ID,
CLAUDE_3_5_SONNET_20240620_MODEL_ID,
Expand Down Expand Up @@ -267,11 +267,11 @@ export const GPT_4O_MINI_MODEL_CONFIG: ModelConfigurationType = {
toolUseMetaPrompt: LEGACY_OPEN_AI_TOOL_USE_META_PROMPT,
supportsVision: true,
};
export const O1_PREVIEW_MODEL_CONFIG: ModelConfigurationType = {
export const O1_MODEL_CONFIG: ModelConfigurationType = {
providerId: "openai",
modelId: O1_PREVIEW_MODEL_ID,
displayName: "O1 Preview",
contextSize: 128_000,
modelId: O1_MODEL_ID,
displayName: "O1",
contextSize: 200_000,
recommendedTopK: 32,
recommendedExhaustiveTopK: 128, // 65_536
largeModel: true,
Expand Down Expand Up @@ -601,7 +601,7 @@ export const SUPPORTED_MODEL_CONFIGS: ModelConfigurationType[] = [
GPT_4O_MODEL_CONFIG,
GPT_4O_20240806_MODEL_CONFIG,
GPT_4O_MINI_MODEL_CONFIG,
O1_PREVIEW_MODEL_CONFIG,
O1_MODEL_CONFIG,
O1_MINI_MODEL_CONFIG,
CLAUDE_3_OPUS_DEFAULT_MODEL_CONFIG,
CLAUDE_3_5_SONNET_20240620_DEPRECATED_MODEL_CONFIG,
Expand Down

0 comments on commit 9cdc903

Please sign in to comment.