From 28e36a6ad7d33013207ff924894c52767d529c57 Mon Sep 17 00:00:00 2001 From: Hans Date: Tue, 30 Apr 2024 02:59:19 +0800 Subject: [PATCH] feat: async load binding --- README.md | 2 +- lib/binding.ts | 6 +++--- lib/index.ts | 10 ++++++---- test/index.test.ts | 2 +- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 0a92739..8664935 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ npm install @fugood/llama.node import { loadModel } from '@fugood/llama.node' // Initial a Llama context with the model (may take a while) -const context = loadModel({ +const context = await loadModel({ model: 'path/to/gguf/model', use_mlock: true, n_ctx: 2048, diff --git a/lib/binding.ts b/lib/binding.ts index 54692ef..a4e21b8 100644 --- a/lib/binding.ts +++ b/lib/binding.ts @@ -48,11 +48,11 @@ export interface Module { LlamaContext: LlamaContext } -export const loadModule = (variant?: string): Module => { +export const loadModule = async (variant?: string): Promise<Module> => { try { if (variant) { - return require(`../bin/${process.platform}-${variant}/${process.arch}/llama-node.node`) as Module + return await import(`../bin/${process.platform}-${variant}/${process.arch}/llama-node.node`) as Module } } catch {} // ignore errors and try the common path - return require(`../bin/${process.platform}/${process.arch}/llama-node.node`) as Module + return await import(`../bin/${process.platform}/${process.arch}/llama-node.node`) as Module } \ No newline at end of file diff --git a/lib/index.ts b/lib/index.ts index fb0837a..49c9f54 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -1,5 +1,5 @@ import { loadModule, LlamaModelOptions } from './binding' -import type { LlamaContext } from './binding' +import type { Module, LlamaContext } from './binding' export * from './binding' @@ -7,7 +7,9 @@ export interface LlamaModelOptionsExtended extends LlamaModelOptions { lib_variant?: string } -export const loadModel = (options: LlamaModelOptionsExtended): LlamaContext 
=> { - const { LlamaContext } = loadModule(options.lib_variant) - return new LlamaContext(options) +let module: Module | null = null + +export const loadModel = async (options: LlamaModelOptionsExtended): Promise<LlamaContext> => { + module ??= await loadModule(options.lib_variant) + return new module.LlamaContext(options) } diff --git a/test/index.test.ts b/test/index.test.ts index beb4200..8e4f21f 100644 --- a/test/index.test.ts +++ b/test/index.test.ts @@ -2,7 +2,7 @@ import path from 'path' import { loadModel } from '../lib' it('work fine', async () => { - const model = loadModel({ model: path.resolve(__dirname, './tiny-random-llama.gguf') }) + const model = await loadModel({ model: path.resolve(__dirname, './tiny-random-llama.gguf') }) const result = await model.completion({ prompt: 'My name is Merve and my favorite', n_samples: 1,