From f7346692afaade9c85ae8e951859ec4da2847099 Mon Sep 17 00:00:00 2001 From: Rob Lourens Date: Tue, 22 Oct 2024 13:56:55 -0700 Subject: [PATCH] Fix countMessageTokens (#124) * Fix countMessageTokens * Version bump --- package.json | 2 +- src/base/tokenizer/tokenizer.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index f0d6db3..c4768e9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@vscode/prompt-tsx", - "version": "0.3.0-alpha.10", + "version": "0.3.0-alpha.11", "description": "Declare LLM prompts with TSX", "main": "./dist/base/index.js", "types": "./dist/base/index.d.ts", diff --git a/src/base/tokenizer/tokenizer.ts b/src/base/tokenizer/tokenizer.ts index 9267159..1e050ab 100644 --- a/src/base/tokenizer/tokenizer.ts +++ b/src/base/tokenizer/tokenizer.ts @@ -33,10 +33,10 @@ export class AnyTokenizer implements ITokenizer { } async countMessageTokens(message: ChatMessage): Promise<number> { - const vscode = require('vscode'); + const vscode = await import('vscode'); return this.countTokens({ role: this.toChatRole(message.role), - content: new vscode.LanguageModelTextPart(message.content), + content: [new vscode.LanguageModelTextPart(message.content)], name: 'name' in message ? message.name : undefined, }); }