diff --git a/docs/api.md b/docs/api.md
index 1c9e9c1..3d29db8 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -73,18 +73,18 @@ while (true) {
 ### Advanced API
 
 This API is only available in Node.js.
+[demo](../examples/remotecall.js)
 
-```ts
-import {RemoteCatAI} from 'catai';
-import progress from 'progress-stream';
+```js
+import { RemoteCatAI } from "catai";
 
-const catai = new RemoteCatAI('ws://localhost:3000');
+const catai = new RemoteCatAI("ws://localhost:3000");
 
-const response = await catai.prompt('Write me 100 words story', token => {
-    progress.stdout.write(token);
+const response = await catai.prompt("Write me 100 words story", (token) => {
+  process.stdout.write(token);
 });
 
 console.log(`Total text length: ${response.length}`);
-
 catai.close();
+
 ```
diff --git a/docs/configuration.md b/docs/configuration.md
index bd7ad14..1499fc6 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -20,7 +20,7 @@ You can config the model by the following steps:
 
    [LLamaChatPromptOptions](https://withcatai.github.io/node-llama-cpp/api/type-aliases/LLamaChatPromptOptions)
 
-   You can edit the [systemPrompt](system_prompt.md) of the chat too.
+   You can edit the [systemPrompt](system-prompt.md) of the chat too.
 
 3. Restart the server.
diff --git a/docs/system_prompt.md b/docs/system-prompt.md
similarity index 59%
rename from docs/system_prompt.md
rename to docs/system-prompt.md
index 9bfd2b5..03f822b 100644
--- a/docs/system_prompt.md
+++ b/docs/system-prompt.md
@@ -1,15 +1,15 @@
-# CatAi system_prompt
+# CatAI system prompt
 
 According to https://withcatai.github.io/node-llama-cpp/api/type-aliases/LlamaChatSessionOptions,
-it is possible to modify the system_prompt of a chat.
+it is possible to modify the system prompt of a chat.
 
 This can be achieved by adding a systemPrompt key in modelSettings
 
-![catAi systemPrompt settings](system_prompt/settings.png)
+![CatAI systemPrompt settings](system-prompt/settings.png)
 
 Save and restart to apply.
 
 Then the chat acts like a pirate according to the systemPrompt you chose ;-)
 
-![catAi systemPrompt demo](system_prompt/demo.png)
\ No newline at end of file
+![CatAI systemPrompt demo](system-prompt/demo.png)
\ No newline at end of file
diff --git a/docs/system_prompt/demo.png b/docs/system-prompt/demo.png
similarity index 100%
rename from docs/system_prompt/demo.png
rename to docs/system-prompt/demo.png
diff --git a/docs/system_prompt/settings.png b/docs/system-prompt/settings.png
similarity index 100%
rename from docs/system_prompt/settings.png
rename to docs/system-prompt/settings.png
diff --git a/examples/remotecall.js b/examples/remotecall.js
new file mode 100644
index 0000000..7eb626f
--- /dev/null
+++ b/examples/remotecall.js
@@ -0,0 +1,13 @@
+import { RemoteCatAI } from "catai";
+
+const catai = new RemoteCatAI("ws://localhost:3000");
+
+catai.on("open", async () => {
+  console.log("Connected");
+  const response = await catai.prompt("Write me 100 words story", (token) => {
+    process.stdout.write(token);
+  });
+
+  console.log(`Total text length: ${response.length}`);
+  catai.close();
+});
diff --git a/server/src/server/remote/remote-catai.ts b/server/src/server/remote/remote-catai.ts
index 044dcbd..95b4acf 100644
--- a/server/src/server/remote/remote-catai.ts
+++ b/server/src/server/remote/remote-catai.ts
@@ -1,10 +1,11 @@
-import WebSocket, {ClientOptions} from 'ws';
-import {ClientRequestArgs} from 'http';
-import {ChatContext} from '../../manage-models/bind-class/chat-context.js';
+import WebSocket, { ClientOptions } from 'ws';
+import { ClientRequestArgs } from 'http';
+import { ChatContext } from '../../manage-models/bind-class/chat-context.js';
 
 export default class RemoteCatAI extends ChatContext {
     private _ws: WebSocket;
     private _closed = false;
+    private _promiseOpen?: Promise<void>;
 
     /**
      * Connect to remote CatAI server, and use it as a chat context
@@ -28,10 +29,19 @@
             if (this._closed) return;
             this.emit('error', 'Connection closed: ' + code);
         });
+
+        this._ws.on('open', () => {
+            this.emit("open");
+        });
+
+        this._promiseOpen = new Promise((resolve, reject) => {
+            this.once('open', resolve);
+            this.once('error', reject);
+        });
     }
 
     private _onMessage(message: string) {
-        const {event, value} = JSON.parse(message);
+        const { event, value } = JSON.parse(message);
 
         switch (event) {
             case 'token':
                 this.emit('token', value);
@@ -49,14 +59,15 @@
     }
 
     private _send(event: 'prompt' | 'abort', value: string) {
-        this._ws.send(JSON.stringify({event, value}));
+        this._ws.send(JSON.stringify({ event, value }));
     }
 
     abort(reason?: string): void {
         this._send('abort', reason || 'Aborted by user');
     }
 
-    prompt(prompt: string, onToken?: (token: string) => void): Promise<string> {
+    async prompt(prompt: string, onToken?: (token: string) => void): Promise<string> {
+        await this._promiseOpen;
         this._send('prompt', prompt);
 
         let buildText = '';
         const tokenEvent = (token: string) => {
             buildText += token;
             onToken?.(token);
         };
         this.on('token', tokenEvent);
 
-        return new Promise((resolve, reject) => {
+        return await new Promise((resolve, reject) => {
             this.once('error', reject);
             this.once('modelResponseEnd', () => {
                 this.off('token', tokenEvent);