{
}
// Then finalize the chat messages
- const messageResult = container.toChatMessages();
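+ // Assumption: toChatMessages now returns an iterable rather than an array,
+ // so spread it into a concrete array before further use.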
+ const messageResult = [...container.toChatMessages()];
const tokenCount = await container.tokenCount(this._tokenizer);
const remainingMetadata = [...container.allMetadata()];
@@ -646,11 +646,15 @@ class PromptTreeElement {
);
return parent;
} else {
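+ // Collapse the element-kind booleans into a ContainerFlags bitmask so that
+ // adding a new kind (here, Chunk) does not grow the constructor arity.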
+ let flags = 0;
+ if (this._obj instanceof LegacyPrioritization) flags |= ContainerFlags.IsLegacyPrioritization;
+ if (this._obj instanceof Chunk) flags |= ContainerFlags.IsChunk;
+
return new MaterializedContainer(
this._obj?.props.priority || 0,
this._children.map(child => child.materialize()),
this._metadata,
- this._obj instanceof LegacyPrioritization,
+ flags,
);
}
}
@@ -682,7 +686,12 @@ class PromptText {
}
public materialize() {
- return new MaterializedChatMessageTextChunk(this.text, this.priority ?? Number.MAX_SAFE_INTEGER, this.metadata || [], this.lineBreakBefore || this.childIndex === 0);
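+ // Preserve the old boolean behaviour through the richer LineBreakBefore enum:
+ // an explicit lineBreakBefore always breaks, while a first child defers to
+ // IfNotTextSibling so adjacent text elements can still join on one line.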
+ const lineBreak = this.lineBreakBefore
+ ? LineBreakBefore.Always
+ : this.childIndex === 0
+ ? LineBreakBefore.IfNotTextSibling
+ : LineBreakBefore.None;
+ return new MaterializedChatMessageTextChunk(this.text, this.priority ?? Number.MAX_SAFE_INTEGER, this.metadata || [], lineBreak);
}
public toJSON(): JSONT.TextJSON {
diff --git a/src/base/test/materialized.test.ts b/src/base/test/materialized.test.ts
index 50f3c9d..0aaf53b 100644
--- a/src/base/test/materialized.test.ts
+++ b/src/base/test/materialized.test.ts
@@ -3,7 +3,7 @@
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
-import { MaterializedChatMessage, MaterializedChatMessageTextChunk, MaterializedContainer } from '../materialized';
+import { LineBreakBefore, MaterializedChatMessage, MaterializedChatMessageTextChunk, MaterializedContainer } from '../materialized';
import { ChatRole } from '../openai';
import { ITokenizer } from '../tokenizer/tokenizer';
class MockTokenizer implements ITokenizer {
@@ -17,10 +17,10 @@ class MockTokenizer implements ITokenizer {
suite('Materialized', () => {
test('should calculate token count correctly', async () => {
const tokenizer = new MockTokenizer();
- const child1 = new MaterializedChatMessageTextChunk('Hello', 1, [], false);
- const child2 = new MaterializedChatMessageTextChunk('World', 1, [], false);
+ const child1 = new MaterializedChatMessageTextChunk('Hello', 1, [], LineBreakBefore.None);
+ const child2 = new MaterializedChatMessageTextChunk('World', 1, [], LineBreakBefore.None);
const message = new MaterializedChatMessage(ChatRole.User, 'user', undefined, undefined, 1, 0, [], [child1, child2]);
- const container = new MaterializedContainer(1, [message], []);
+ const container = new MaterializedContainer(1, [message], [], 0);
assert.deepStrictEqual(await container.tokenCount(tokenizer), 13);
container.removeLowestPriorityChild();
@@ -29,10 +29,10 @@ suite('Materialized', () => {
test('should calculate upper bound token count correctly', async () => {
const tokenizer = new MockTokenizer();
- const child1 = new MaterializedChatMessageTextChunk('Hello', 1, [], false);
- const child2 = new MaterializedChatMessageTextChunk('World', 1, [], false);
+ const child1 = new MaterializedChatMessageTextChunk('Hello', 1, [], LineBreakBefore.None);
+ const child2 = new MaterializedChatMessageTextChunk('World', 1, [], LineBreakBefore.None);
const message = new MaterializedChatMessage(ChatRole.User, 'user', undefined, undefined, 1, 0, [], [child1, child2]);
- const container = new MaterializedContainer(1, [message], []);
+ const container = new MaterializedContainer(1, [message], [], 0);
assert.deepStrictEqual(await container.upperBoundTokenCount(tokenizer), 13);
container.removeLowestPriorityChild();
diff --git a/src/base/test/renderer.test.tsx b/src/base/test/renderer.test.tsx
index 4b23ea7..1383c07 100644
--- a/src/base/test/renderer.test.tsx
+++ b/src/base/test/renderer.test.tsx
@@ -8,6 +8,7 @@ import { BaseTokensPerCompletion, ChatMessage, ChatRole } from '../openai';
import { PromptElement } from '../promptElement';
import {
AssistantMessage,
+ Chunk,
LegacyPrioritization,
PrioritizedList,
SystemMessage,
@@ -305,6 +306,18 @@ suite('PromptRenderer', () => {
>, ['a', 'b', 'c']);
});
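+ // A Chunk is pruned atomically: its children survive or are removed together.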
+ test('chunks together', async () => {
+ await assertPruningOrder(<>
+ {
}
export interface RuntimePromptElementProps {
- children?: PromptPiece[];
+ children?: PromptPieceChild[];
}
export type PromptElementProps