Adopt finalized form of tools api, content2 -> content (#122)
* Adopt finalized form of tools api, content2 -> content

* Version bump
roblourens authored Oct 22, 2024
1 parent f87f331 commit 194a8c3
Showing 7 changed files with 20,074 additions and 560 deletions.
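In consumer terms, this commit adopts the finalized VS Code language model tools API: message parts are now assigned to `content` instead of the interim `content2` property. A minimal sketch of the resulting shape (the call id and text below are illustrative, not taken from the diff):

```ts
import * as vscode from 'vscode';

// Sketch only: an empty user message carrying a tool result.
const message = vscode.LanguageModelChatMessage.User('');
// Interim proposed API: message.content2 = [...parts];
// Finalized API adopted by this commit:
message.content = [
	new vscode.LanguageModelToolResultPart('call_123', [
		new vscode.LanguageModelTextPart('tool output goes here'),
	]),
];
```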
7 changes: 0 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions package.json
@@ -1,6 +1,6 @@
 {
   "name": "@vscode/prompt-tsx",
-  "version": "0.3.0-alpha.8",
+  "version": "0.3.0-alpha.9",
   "description": "Declare LLM prompts with TSX",
   "main": "./dist/base/index.js",
   "types": "./dist/base/index.d.ts",
@@ -25,7 +25,6 @@
   "devDependencies": {
     "@microsoft/tiktokenizer": "^1.0.6",
     "@types/node": "^20.11.30",
-    "@types/vscode": "^1.92.0",
     "@vscode/test-cli": "^0.0.9",
     "@vscode/test-electron": "^2.4.1",
     "concurrently": "^9.0.1",
12 changes: 6 additions & 6 deletions src/base/index.ts
@@ -2,14 +2,14 @@
  * Copyright (c) Microsoft Corporation and GitHub. All rights reserved.
  *--------------------------------------------------------------------------------------------*/
 
-import type { CancellationToken, ChatResponsePart, LanguageModelChat, Progress } from 'vscode';
+import type { CancellationToken, ChatResponsePart, LanguageModelChat, Progress, LanguageModelChatMessage } from 'vscode';
 import { PromptElementJSON } from './jsonTypes';
 import { ChatMessage, ChatRole } from './openai';
 import { MetadataMap, PromptRenderer } from './promptRenderer';
 import { PromptReference } from './results';
 import { AnyTokenizer, ITokenizer } from './tokenizer/tokenizer';
 import { BasePromptElementProps, IChatEndpointInfo, PromptElementCtor } from './types';
-import { ChatDocumentContext, LanguageModelChatMessage } from './vscodeTypes.d';
+import { ChatDocumentContext } from './vscodeTypes.d';
 
 export * from './htmlTracer';
 export * as JSONTree from './jsonTypes';
@@ -213,8 +213,8 @@ export function toVsCodeChatMessages(messages: ChatMessage[]) {
 					m.name
 				);
 				if (m.tool_calls) {
-					message.content2 = [
-						m.content,
+					message.content = [
+						new vscode.LanguageModelTextPart(m.content),
 						...m.tool_calls.map(
 							tc =>
 								new vscode.LanguageModelToolCallPart(tc.function.name, tc.id, tc.function.arguments)
@@ -226,12 +226,12 @@
 				return vscode.LanguageModelChatMessage.User(m.content, m.name);
 			case ChatRole.Function: {
 				const message: LanguageModelChatMessage = vscode.LanguageModelChatMessage.User('');
-				message.content2 = [new vscode.LanguageModelToolResultPart(m.name, [new vscode.LanguageModelTextPart(m.content)])];
+				message.content = [new vscode.LanguageModelToolResultPart(m.name, [new vscode.LanguageModelTextPart(m.content)])];
 				return message;
 			}
 			case ChatRole.Tool: {
 				const message: LanguageModelChatMessage = vscode.LanguageModelChatMessage.User('');
-				message.content2 = [new vscode.LanguageModelToolResultPart(m.tool_call_id, [new vscode.LanguageModelTextPart(m.content)])];
+				message.content = [new vscode.LanguageModelToolResultPart(m.tool_call_id, [new vscode.LanguageModelTextPart(m.content)])];
 				return message;
 			}
 			default:
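A hedged usage sketch of the updated conversion path: OpenAI-style `ChatMessage` objects are turned into VS Code chat messages and sent to a model. It assumes `ChatMessage` and `ChatRole` are re-exported from the package entry point alongside `toVsCodeChatMessages`, and it omits error handling:

```ts
import * as vscode from 'vscode';
import { ChatMessage, ChatRole, toVsCodeChatMessages } from '@vscode/prompt-tsx';

async function ask(model: vscode.LanguageModelChat, token: vscode.CancellationToken) {
	const messages: ChatMessage[] = [
		{ role: ChatRole.System, content: 'You are a concise assistant.' },
		{ role: ChatRole.User, content: 'Summarize the tools API change.' },
	];
	// toVsCodeChatMessages now emits LanguageModelTextPart / ToolCallPart /
	// ToolResultPart values on `content` rather than `content2`.
	const response = await model.sendRequest(toVsCodeChatMessages(messages), {}, token);
	for await (const chunk of response.text) {
		console.log(chunk);
	}
}
```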
3 changes: 1 addition & 2 deletions src/base/promptElements.tsx
@@ -2,12 +2,11 @@
  * Copyright (c) Microsoft Corporation and GitHub. All rights reserved.
  *--------------------------------------------------------------------------------------------*/
 
-import type { CancellationToken } from 'vscode';
+import type { CancellationToken, LanguageModelPromptTsxPart, LanguageModelTextPart, LanguageModelToolResult } from 'vscode';
 import { contentType } from '.';
 import { ChatRole } from './openai';
 import { PromptElement } from './promptElement';
 import { BasePromptElementProps, PromptPiece, PromptSizing } from './types';
-import type { LanguageModelPromptTsxPart, LanguageModelTextPart, LanguageModelToolResult } from './vscodeTypes';
 import { PromptElementJSON } from './jsonTypes';
 
 export type ChatMessagePromptElement = SystemMessage | UserMessage | AssistantMessage;
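The part and result types used by the prompt elements now come straight from the `vscode` typings rather than the local `vscodeTypes` shim. As a small illustrative sketch of those classes (the text is made up), a tool result built from a single text part looks like this; a `LanguageModelPromptTsxPart` could be included as well so the result can be re-rendered by prompt-tsx elements:

```ts
import * as vscode from 'vscode';

// Illustrative only: a tool result containing one plain-text part.
const result = new vscode.LanguageModelToolResult([
	new vscode.LanguageModelTextPart('3 matching symbols found'),
]);
```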
7 changes: 3 additions & 4 deletions src/base/tokenizer/tokenizer.ts
@@ -2,9 +2,8 @@
  * Copyright (c) Microsoft Corporation and GitHub. All rights reserved.
  *--------------------------------------------------------------------------------------------*/
 
-import type { CancellationToken } from 'vscode';
+import type { CancellationToken, LanguageModelChatMessage } from 'vscode';
 import { ChatMessage, ChatRole } from '../openai';
-import type { LanguageModelChatMessage } from '../vscodeTypes';
 
 /**
  * Represents a tokenizer that can be used to tokenize text in chat messages.
@@ -34,10 +33,10 @@ export class AnyTokenizer implements ITokenizer {
 	}
 
 	async countMessageTokens(message: ChatMessage): Promise<number> {
+		const vscode = require('vscode');
 		return this.countTokens({
 			role: this.toChatRole(message.role),
-			content: message.content,
-			content2: [message.content],
+			content: vscode.LanguageModelTextPart(message.content),
 			name: 'name' in message ? message.name : undefined,
 		});
 	}
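When a `vscode.LanguageModelChat` is passed to `renderPrompt`, it is wrapped in `AnyTokenizer`, so per-message token counting flows through the code above. A hedged end-to-end sketch follows; the prompt element, query text, and model family are illustrative, and it assumes `renderPrompt`'s `(ctor, props, endpoint, model, progress, token)` argument ordering:

```tsx
import * as vscode from 'vscode';
import { BasePromptElementProps, PromptElement, UserMessage, renderPrompt } from '@vscode/prompt-tsx';

interface IMyPromptProps extends BasePromptElementProps {
	userQuery: string;
}

class MyPrompt extends PromptElement<IMyPromptProps> {
	render() {
		return <UserMessage>{this.props.userQuery}</UserMessage>;
	}
}

async function buildMessages(token: vscode.CancellationToken) {
	const [model] = await vscode.lm.selectChatModels({ family: 'gpt-4o' });
	// Passing the chat model itself makes renderPrompt count tokens through
	// AnyTokenizer, which wraps model.countTokens.
	const { messages } = await renderPrompt(
		MyPrompt,
		{ userQuery: 'Explain the content2 -> content rename.' },
		{ modelMaxPromptTokens: model.maxInputTokens },
		model,
		undefined,
		token
	);
	return messages;
}
```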