From 2de06ad00bf0e6d4cb2802b37fec8d723161f796 Mon Sep 17 00:00:00 2001
From: Kai Wang
Date: Fri, 7 Jun 2024 11:38:27 +0900
Subject: [PATCH] community[minor]: Fixed ChatWebLLM reload function and
 updated model name in example (#5671)

* [fix] Fix parameter order of reload and update model name in example

* Update example

---------

Co-authored-by: jacoblee93
---
 examples/src/models/chat/integration_webllm.ts     | 2 +-
 libs/langchain-community/src/chat_models/webllm.ts | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/src/models/chat/integration_webllm.ts b/examples/src/models/chat/integration_webllm.ts
index 83e0324f81a1..684e29f4fa13 100644
--- a/examples/src/models/chat/integration_webllm.ts
+++ b/examples/src/models/chat/integration_webllm.ts
@@ -13,7 +13,7 @@ import { HumanMessage } from "@langchain/core/messages";
 // Or by importing it via:
 // import { prebuiltAppConfig } from "@mlc-ai/web-llm";
 const model = new ChatWebLLM({
-  model: "Phi2-q4f32_1",
+  model: "Phi-3-mini-4k-instruct-q4f16_1-MLC",
   chatOptions: {
     temperature: 0.5,
   },
diff --git a/libs/langchain-community/src/chat_models/webllm.ts b/libs/langchain-community/src/chat_models/webllm.ts
index e3ecda6bd1c1..4edf6cc46375 100644
--- a/libs/langchain-community/src/chat_models/webllm.ts
+++ b/libs/langchain-community/src/chat_models/webllm.ts
@@ -28,7 +28,7 @@ export interface WebLLMCallOptions extends BaseLanguageModelCallOptions {}
  * ```typescript
  * // Initialize the ChatWebLLM model with the model record.
  * const model = new ChatWebLLM({
- *   model: "Phi2-q4f32_1",
+ *   model: "Phi-3-mini-4k-instruct-q4f16_1-MLC",
  *   chatOptions: {
  *     temperature: 0.5,
  *   },
@@ -79,8 +79,8 @@ export class ChatWebLLM extends SimpleChatModel {

   async reload(
     modelId: string,
-    newAppConfig?: webllm.AppConfig,
-    newChatOpts?: webllm.ChatOptions
+    newChatOpts?: webllm.ChatOptions,
+    newAppConfig?: webllm.AppConfig
   ) {
     await this.engine.reload(modelId, newChatOpts, newAppConfig);
   }
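
For reference, below is a minimal usage sketch (not part of the diff) showing why the parameter reorder matters: after the fix, `ChatWebLLM.reload` takes `(modelId, newChatOpts?, newAppConfig?)`, matching the order in which it forwards arguments to `this.engine.reload`. The sketch assumes the import paths used in `examples/src/models/chat/integration_webllm.ts`, that an `initialize()` step runs before the model is used, and that passing `prebuiltAppConfig` here is purely illustrative.

```typescript
// Sketch only: illustrates the corrected argument order of ChatWebLLM.reload.
// Assumes a browser/web-worker environment, as WebLLM requires.
import { ChatWebLLM } from "@langchain/community/chat_models/webllm";
import { HumanMessage } from "@langchain/core/messages";
import { prebuiltAppConfig } from "@mlc-ai/web-llm";

const model = new ChatWebLLM({
  // Updated prebuilt model id from the patched example.
  model: "Phi-3-mini-4k-instruct-q4f16_1-MLC",
  chatOptions: {
    temperature: 0.5,
  },
});

// Load the initial model weights (assumed setup step; the shipped example
// calls initialize with a progress callback before invoking the model).
await model.initialize();

// With the fix, the optional chat options come second and the app config
// third, so they reach engine.reload in the order it expects.
await model.reload(
  "Phi-3-mini-4k-instruct-q4f16_1-MLC",
  { temperature: 0.2 }, // newChatOpts
  prebuiltAppConfig // newAppConfig (illustrative; both arguments are optional)
);

const response = await model.invoke([new HumanMessage("What is 1 + 1?")]);
console.log(response.content);
```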