feat: report omitted references (#72)
joyceerhl authored Jul 20, 2024
1 parent 994b1db commit 63aee97
Showing 5 changed files with 43 additions and 9 deletions.
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "@vscode/prompt-tsx",
-  "version": "0.2.5-alpha",
+  "version": "0.2.6-alpha",
   "description": "Declare LLM prompts with TSX",
   "main": "./dist/base/index.js",
   "types": "./dist/base/index.d.ts",
35 changes: 31 additions & 4 deletions src/base/promptRenderer.ts
@@ -21,6 +21,11 @@ export interface RenderPromptResult {
 	 * The references that survived prioritization in the rendered {@link RenderPromptResult.messages messages}.
 	 */
 	readonly references: PromptReference[];
+
+	/**
+	 * The references attached to chat message chunks that did not survive prioritization.
+	 */
+	readonly omittedReferences: PromptReference[];
 }
 
 export type QueueItem<C, P> = {
@@ -212,6 +217,7 @@ export class PromptRenderer<P extends BasePromptElementProps> {
 		prioritizedChunks.sort((a, b) => cmp(things[a.index], things[b.index]));
 
 		let remainingBudget = this._endpoint.modelMaxPromptTokens;
+		const omittedChunks: T[] = [];
 		while (prioritizedChunks.length > 0) {
 			const prioritizedChunk = prioritizedChunks.shift()!;
 			const index = prioritizedChunk.index;
@@ -224,6 +230,7 @@ export class PromptRenderer<P extends BasePromptElementProps> {
 			}
 			if (tokenCount > remainingBudget) {
 				// Wouldn't fit anymore
+				omittedChunks.push(chunk);
 				break;
 			}
 			chunkResult[index] = chunk;
@@ -233,7 +240,13 @@ export class PromptRenderer<P extends BasePromptElementProps> {
 			remainingBudget -= tokenCount;
 		}
 
-		return { result: coalesce(chunkResult), tokenCount: this._endpoint.modelMaxPromptTokens - remainingBudget };
+		for (const omittedChunk of prioritizedChunks) {
+			const index = omittedChunk.index;
+			const chunk = things[index];
+			omittedChunks.push(chunk);
+		}
+
+		return { result: coalesce(chunkResult), tokenCount: this._endpoint.modelMaxPromptTokens - remainingBudget, omittedChunks };
 	}
 
 	/**
@@ -256,7 +269,7 @@ export class PromptRenderer<P extends BasePromptElementProps> {
 		// First pass: sort message chunks by priority. Note that this can yield an imprecise result due to token boundaries within a single chat message
 		// so we also need to do a second pass over the full chat messages later
 		const chunkMessages = new Set<MaterializedChatMessage>();
-		const { result: prioritizedChunks } = await this._prioritize(
+		const { result: prioritizedChunks, omittedChunks } = await this._prioritize(
 			resultChunks,
 			(a, b) => MaterializedChatMessageTextChunk.cmp(a, b),
 			async (chunk) => {
@@ -298,7 +311,7 @@ export class PromptRenderer<P extends BasePromptElementProps> {
 		const messageResult = prioritizedMaterializedChatMessages.map(message => message?.toChatMessage());
 
 		// Remove undefined and duplicate references
-		const { references } = prioritizedMaterializedChatMessages.reduce<{ references: PromptReference[], names: Set<string> }>((acc, message) => {
+		const { references, names } = prioritizedMaterializedChatMessages.reduce<{ references: PromptReference[], names: Set<string> }>((acc, message) => {
 			[...this._references, ...message.references].forEach((ref) => {
 				const isVariableName = 'variableName' in ref.anchor;
 				if (isVariableName && !acc.names.has(ref.anchor.variableName)) {
@@ -311,7 +324,21 @@ export class PromptRenderer<P extends BasePromptElementProps> {
 			return acc;
 		}, { references: [], names: new Set<string>() });
 
-		return { messages: messageResult, hasIgnoredFiles: this._ignoredFiles.length > 0, tokenCount, references: coalesce(references) };
+		// Collect the references for chat message chunks that did not survive prioritization
+		const { references: omittedReferences } = omittedChunks.reduce<{ references: PromptReference[] }>((acc, message) => {
+			message.references.forEach((ref) => {
+				const isVariableName = 'variableName' in ref.anchor;
+				if (isVariableName && !names.has(ref.anchor.variableName)) {
+					acc.references.push(ref);
+					names.add(ref.anchor.variableName);
+				} else if (!isVariableName) {
+					acc.references.push(ref);
+				}
+			});
+			return acc;
+		}, { references: [] });
+
+		return { messages: messageResult, hasIgnoredFiles: this._ignoredFiles.length > 0, tokenCount, references: coalesce(references), omittedReferences: coalesce(omittedReferences) };
 	}
 
 	private _handlePromptChildren(element: QueueItem<PromptElementCtor<any, any>, P>, pieces: ProcessedPromptPiece[], sizing: PromptSizingContext, progress: Progress<ChatResponsePart> | undefined, token: CancellationToken | undefined) {
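With this change, a render call can distinguish references that made it into the final messages from references whose chunks were pruned during prioritization. A minimal consumer sketch (the `renderer` instance and `reportReference` callback are hypothetical stand-ins; the test further down invokes `render(undefined, undefined)` the same way):

// Sketch: assumes `renderer` is an already-constructed PromptRenderer instance.
const res = await renderer.render(undefined, undefined);

// References that survived prioritization and are present in res.messages.
for (const ref of res.references) {
	reportReference(ref);
}

// References attached to chunks that were dropped to fit the token budget.
if (res.omittedReferences.length > 0) {
	console.warn(`omitted ${res.omittedReferences.length} reference(s) due to token budget`);
}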
9 changes: 8 additions & 1 deletion src/base/results.ts
@@ -14,12 +14,19 @@ export abstract class PromptMetadata {
 	}
 }
 
+export enum ChatResponseReferencePartStatusKind {
+	Complete = 1,
+	Partial = 2,
+	Omitted = 3
+}
+
 /**
  * A reference used for creating the prompt.
  */
 export class PromptReference {
 	constructor(
 		readonly anchor: Uri | Location | { variableName: string; value?: Uri | Location },
-		readonly iconPath?: Uri | ThemeIcon | { light: Uri; dark: Uri }
+		readonly iconPath?: Uri | ThemeIcon | { light: Uri; dark: Uri },
+		readonly options?: { status?: { description: string; kind: ChatResponseReferencePartStatusKind } }
 	) { }
 }
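The new optional `options` parameter lets a reference carry a completion status alongside its anchor. A usage sketch under the new constructor signature (the import path and `fileUri` are illustrative assumptions, not part of this commit):

import { PromptReference, ChatResponseReferencePartStatusKind } from './results';

// `fileUri` stands in for a vscode.Uri pointing at the referenced file.
const omittedRef = new PromptReference(
	{ variableName: 'file', value: fileUri },
	undefined, // no custom icon
	{ status: { description: 'Omitted due to the token budget', kind: ChatResponseReferencePartStatusKind.Omitted } }
);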
2 changes: 1 addition & 1 deletion src/base/test/renderer.test.tsx
@@ -893,7 +893,7 @@ LOW MED 00 01 02 03 04 05 06 07 08 09
 		const res = await inst.render(undefined, undefined);
 		assert.equal(res.messages.length, 1);
 		assert.equal(res.references.length, 0);
-
+		assert.equal(res.omittedReferences.length, 1);
 	});
 
 	test('reports references under nested extrinsics', async () => {
