Commit e3630a7

Bump Mistral 7B context size to 8192 tokens (#2140)
spolu authored Oct 17, 2023
1 parent 9968e99 commit e3630a7
Showing 2 changed files with 4 additions and 4 deletions.
core/src/providers/textsynth.rs (2 additions, 2 deletions)
@@ -796,8 +796,8 @@ impl LLM for TextSynthLLM {
 
     fn context_size(&self) -> usize {
         match self.id.as_str() {
-            "mistral_7B" => 4096,
-            "mistral_7B_instruct" => 4096,
+            "mistral_7B" => 8192,
+            "mistral_7B_instruct" => 8192,
             "falcon_7B" => 2048,
             "falcon_40B" => 2048,
             "falcon_40B-chat" => 2048,
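The provider-side change above is what raises the usable window to 8192 tokens. As a rough illustration of what that budget means for a caller, here is a minimal sketch, written in TypeScript to match the front-end file changed below; the helper name and the token counts are hypothetical and not part of this commit.

// Hypothetical sketch (not from this repository): checks whether a prompt plus
// the requested completion fits within a model's context window.
function fitsInContext(
  promptTokens: number,
  maxCompletionTokens: number,
  contextSize: number
): boolean {
  return promptTokens + maxCompletionTokens <= contextSize;
}

// With Mistral 7B bumped from 4096 to 8192, a 6000-token prompt with a
// 1024-token completion now fits where it previously would not.
console.log(fitsInContext(6000, 1024, 8192)); // true
console.log(fitsInContext(6000, 1024, 4096)); // false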
front/lib/assistant.ts (2 additions, 2 deletions)
@@ -66,9 +66,9 @@ export const MISTRAL_7B_DEFAULT_MODEL_CONFIG = {
   providerId: "textsynth",
   modelId: "mistral_7B_instruct",
   displayName: "Mistral 7B",
-  contextSize: 4096,
+  contextSize: 8192,
   largeModel: false,
-  recommendedTopK: 8,
+  recommendedTopK: 16,
 } as const;
 
 export const SUPPORTED_MODEL_CONFIGS = [
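The front-end config doubles recommendedTopK alongside contextSize, consistent with the larger window leaving room for roughly twice as many retrieved chunks. Below is a hedged sketch of how such values might be consumed; the 512-token chunk size and the helper are illustrative assumptions, not code from this repository.

// Illustrative only: derives a retrieval token budget from a config shaped like
// MISTRAL_7B_DEFAULT_MODEL_CONFIG. The 512-token chunk size is an assumption.
interface ModelConfig {
  contextSize: number;
  recommendedTopK: number;
}

function retrievalBudgetTokens(config: ModelConfig, chunkTokens = 512): number {
  // Cap the retrieved-document budget at what the recommended top-k would use,
  // never exceeding the model's context window.
  return Math.min(config.recommendedTopK * chunkTokens, config.contextSize);
}

// With contextSize 8192 and recommendedTopK 16, up to 16 * 512 = 8192 tokens of
// retrieved chunks could be considered, versus 8 * 512 = 4096 before this commit.
console.log(retrievalBudgetTokens({ contextSize: 8192, recommendedTopK: 16 }));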
