
Commit

chore(community/webllm): upgraded @mlc-ai/web-llm dependency and updated its ChatModel
tnfssc committed Jun 2, 2024
1 parent f064ba4 commit 2afe927
Showing 3 changed files with 23 additions and 22 deletions.
4 changes: 2 additions & 2 deletions libs/langchain-community/package.json
@@ -86,7 +86,7 @@
     "@langchain/scripts": "~0.0.14",
     "@layerup/layerup-security": "^1.5.12",
     "@mendable/firecrawl-js": "^0.0.13",
-    "@mlc-ai/web-llm": "^0.2.35",
+    "@mlc-ai/web-llm": "^0.2.40",
     "@mozilla/readability": "^0.4.4",
     "@neondatabase/serverless": "^0.9.1",
     "@notionhq/client": "^2.2.10",
@@ -245,7 +245,7 @@
     "@huggingface/inference": "^2.6.4",
     "@layerup/layerup-security": "^1.5.12",
     "@mendable/firecrawl-js": "^0.0.13",
-    "@mlc-ai/web-llm": "^0.2.35",
+    "@mlc-ai/web-llm": "^0.2.40",
     "@mozilla/readability": "*",
     "@neondatabase/serverless": "*",
     "@notionhq/client": "^2.2.10",
20 changes: 6 additions & 14 deletions libs/langchain-community/src/chat_models/webllm.ts
@@ -43,7 +43,7 @@ export interface WebLLMCallOptions extends BaseLanguageModelCallOptions {}
 export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
   static inputs: WebLLMInputs;

-  protected engine: webllm.EngineInterface;
+  protected engine: webllm.MLCEngine;

   appConfig?: webllm.AppConfig;
@@ -63,40 +63,33 @@ export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
     this.chatOptions = inputs.chatOptions;
     this.model = inputs.model;
     this.temperature = inputs.temperature;
+    this.engine = new webllm.MLCEngine();
   }

   _llmType() {
     return "web-llm";
   }

   async initialize(progressCallback?: webllm.InitProgressCallback) {
-    this.engine = new webllm.Engine();
     if (progressCallback !== undefined) {
       this.engine.setInitProgressCallback(progressCallback);
     }
     await this.reload(this.model, this.chatOptions, this.appConfig);
-    this.engine.setInitProgressCallback(() => {});
   }

   async reload(
     modelId: string,
     newAppConfig?: webllm.AppConfig,
     newChatOpts?: webllm.ChatOptions
   ) {
-    if (this.engine !== undefined) {
-      await this.engine.reload(modelId, newAppConfig, newChatOpts);
-    } else {
-      throw new Error("Initialize model before reloading.");
-    }
+    await this.engine.reload(modelId, newChatOpts, newAppConfig);
   }

   async *_streamResponseChunks(
     messages: BaseMessage[],
     options: this["ParsedCallOptions"],
     runManager?: CallbackManagerForLLMRun
   ): AsyncGenerator<ChatGenerationChunk> {
-    await this.initialize();
-
     const messagesInput: ChatCompletionMessageParam[] = messages.map(
       (message) => {
         if (typeof message.content !== "string") {
@@ -124,14 +117,13 @@ export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
       }
     );

-    const stream = this.engine.chatCompletionAsyncChunkGenerator(
+    const stream = await this.engine.chat.completions.create(
       {
         stream: true,
         messages: messagesInput,
         stop: options.stop,
         logprobs: true,
-      },
-      {}
+      }
     );
     for await (const chunk of stream) {
       // Last chunk has undefined content
@@ -146,7 +138,7 @@ export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
           },
         }),
       });
-      await runManager?.handleLLMNewToken(text ?? "");
+      await runManager?.handleLLMNewToken(text);
     }
   }

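For context, a minimal usage sketch of ChatWebLLM as it looks after this change. This is not part of the commit: the import paths match the package entrypoints, but the model id, temperature, and logging are illustrative assumptions, and a WebGPU-capable browser or web worker environment is assumed.

// Hypothetical usage sketch (not from this commit); values below are illustrative.
import { ChatWebLLM } from "@langchain/community/chat_models/webllm";
import { HumanMessage } from "@langchain/core/messages";

const chat = new ChatWebLLM({
  // Assumed model id; pick any id available in web-llm's prebuilt app config.
  model: "Phi-3-mini-4k-instruct-q4f16_1-MLC",
  chatOptions: { temperature: 0.5 },
});

// The MLCEngine is now created eagerly in the constructor; initialize() only
// wires the progress callback and loads the model weights via reload().
await chat.initialize((report) => console.log(report.text));

// Single-shot call.
const response = await chat.invoke([new HumanMessage("What is WebGPU?")]);
console.log(response.content);

// Streaming now goes through engine.chat.completions.create({ stream: true }).
const stream = await chat.stream([new HumanMessage("Tell me a short joke.")]);
for await (const chunk of stream) {
  console.log(chunk.content);
}
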
21 changes: 15 additions & 6 deletions yarn.lock
@@ -9099,7 +9099,7 @@ __metadata:
     "@langchain/scripts": ~0.0.14
     "@layerup/layerup-security": ^1.5.12
     "@mendable/firecrawl-js": ^0.0.13
-    "@mlc-ai/web-llm": ^0.2.35
+    "@mlc-ai/web-llm": ^0.2.40
     "@mozilla/readability": ^0.4.4
     "@neondatabase/serverless": ^0.9.1
     "@notionhq/client": ^2.2.10
@@ -9266,7 +9266,7 @@ __metadata:
     "@huggingface/inference": ^2.6.4
     "@layerup/layerup-security": ^1.5.12
     "@mendable/firecrawl-js": ^0.0.13
-    "@mlc-ai/web-llm": ^0.2.35
+    "@mlc-ai/web-llm": ^0.2.40
     "@mozilla/readability": "*"
     "@neondatabase/serverless": "*"
     "@notionhq/client": ^2.2.10
@@ -10445,10 +10445,12 @@ __metadata:
   languageName: node
   linkType: hard

-"@mlc-ai/web-llm@npm:^0.2.35":
-  version: 0.2.35
-  resolution: "@mlc-ai/web-llm@npm:0.2.35"
-  checksum: 03c1d1847340f88474e1eeed7a91cc09e29299a1216e378385ffe5479c203d39a8656d98c9187864322453a91f046b874d7073662ab04033527079d9bb29bee3
+"@mlc-ai/web-llm@npm:^0.2.40":
+  version: 0.2.40
+  resolution: "@mlc-ai/web-llm@npm:0.2.40"
+  dependencies:
+    loglevel: ^1.9.1
+  checksum: 44d46178f7b7f899893ee8096fd4188b8c343589a10428c52f87b1b7e708f7a94b2b6315c8a6f8075f14d6d92aebfd8afc7f6d049a2ef60f8b8dc950b98a82e2
   languageName: node
   linkType: hard

@@ -28431,6 +28433,13 @@ __metadata:
   languageName: node
   linkType: hard

+"loglevel@npm:^1.9.1":
+  version: 1.9.1
+  resolution: "loglevel@npm:1.9.1"
+  checksum: e1c8586108c4d566122e91f8a79c8df728920e3a714875affa5120566761a24077ec8ec9e5fc388b022e39fc411ec6e090cde1b5775871241b045139771eeb06
+  languageName: node
+  linkType: hard
+
 "long@npm:*, long@npm:^5.2.1, long@npm:~5.2.3":
   version: 5.2.3
   resolution: "long@npm:5.2.3"
