
Commit

Merge branch 'main' of https://github.com/lobehub/lobe-chat
actions-user committed Jan 31, 2025
2 parents f917bda + 4032658 commit 0421df0
Showing 5 changed files with 244 additions and 7 deletions.
25 changes: 25 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,31 @@

# Changelog

### [Version 1.49.6](https://github.com/lobehub/lobe-chat/compare/v1.49.5...v1.49.6)

<sup>Released on **2025-01-30**</sup>

#### 🐛 Bug Fixes

- **misc**: Support litellm reasoning streaming.

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

#### What's fixed

- **misc**: Support litellm reasoning streaming, closes [#5632](https://github.com/lobehub/lobe-chat/issues/5632) ([9942fb3](https://github.com/lobehub/lobe-chat/commit/9942fb3))

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.49.5](https://github.com/lobehub/lobe-chat/compare/v1.49.4...v1.49.5)

<sup>Released on **2025-01-28**</sup>
7 changes: 7 additions & 0 deletions changelog/v1.json
@@ -1,4 +1,11 @@
[
{
"children": {
"fixes": ["Support litellm reasoning streaming."]
},
"date": "2025-01-30",
"version": "1.49.6"
},
{
"children": {
"fixes": ["Pin @clerk/[email protected] to avoid build error."]
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@lobehub/chat",
"version": "1.49.5",
"version": "1.49.6",
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
"keywords": [
"framework",
202 changes: 201 additions & 1 deletion src/libs/agent-runtime/utils/streams/openai.test.ts
@@ -554,7 +554,7 @@ describe('OpenAIStream', () => {
});

describe('Reasoning', () => {
it('should handle reasoning event', async () => {
it('should handle reasoning event in official DeepSeek api', async () => {
const data = [
{
id: '1',
@@ -722,6 +722,206 @@ describe('OpenAIStream', () => {
chunks.push(decoder.decode(chunk, { stream: true }));
}

expect(chunks).toEqual(
[
'id: 1',
'event: reasoning',
`data: ""\n`,
'id: 1',
'event: reasoning',
`data: "您好"\n`,
'id: 1',
'event: reasoning',
`data: "!"\n`,
'id: 1',
'event: text',
`data: "你好"\n`,
'id: 1',
'event: text',
`data: "很高兴"\n`,
'id: 1',
'event: text',
`data: "为您"\n`,
'id: 1',
'event: text',
`data: "提供"\n`,
'id: 1',
'event: text',
`data: "帮助。"\n`,
'id: 1',
'event: stop',
`data: "stop"\n`,
].map((i) => `${i}\n`),
);
});
it('should handle reasoning in litellm', async () => {
const data = [
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { role: 'assistant', reasoning_content: '' },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { reasoning_content: '您好' },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { reasoning_content: '!' },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { content: '你好', reasoning_content: null },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { content: '很高兴', reasoning_content: null },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { content: '为您', reasoning_content: null },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { content: '提供', reasoning_content: null },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { content: '帮助。', reasoning_content: null },
logprobs: null,
finish_reason: null,
},
],
},
{
id: '1',
object: 'chat.completion.chunk',
created: 1737563070,
model: 'deepseek-reasoner',
system_fingerprint: 'fp_1c5d8833bc',
choices: [
{
index: 0,
delta: { content: '', reasoning_content: null },
logprobs: null,
finish_reason: 'stop',
},
],
usage: {
prompt_tokens: 6,
completion_tokens: 104,
total_tokens: 110,
prompt_tokens_details: { cached_tokens: 0 },
completion_tokens_details: { reasoning_tokens: 70 },
prompt_cache_hit_tokens: 0,
prompt_cache_miss_tokens: 6,
},
},
];

const mockOpenAIStream = new ReadableStream({
start(controller) {
data.forEach((chunk) => {
controller.enqueue(chunk);
});

controller.close();
},
});

const protocolStream = OpenAIStream(mockOpenAIStream);

const decoder = new TextDecoder();
const chunks = [];

// @ts-ignore
for await (const chunk of protocolStream) {
chunks.push(decoder.decode(chunk, { stream: true }));
}

expect(chunks).toEqual(
[
'id: 1',
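For reference, the `id:` / `event:` / `data:` lines asserted in these tests follow an SSE-style stream protocol. Below is a minimal sketch of how one protocol chunk could be serialized into that shape; `ProtocolChunk` and `encodeChunk` are hypothetical names used only for illustration and are not the library's actual encoder.

```ts
// Hypothetical helper, for illustration only: serializes one protocol chunk
// into the SSE-style lines asserted in the tests above.
interface ProtocolChunk {
  data: unknown;
  id: string;
  type: 'data' | 'reasoning' | 'stop' | 'text';
}

const encodeChunk = ({ data, id, type }: ProtocolChunk): string =>
  [`id: ${id}`, `event: ${type}`, `data: ${JSON.stringify(data)}\n`]
    .map((line) => `${line}\n`)
    .join('');

// encodeChunk({ data: '您好', id: '1', type: 'reasoning' })
// -> 'id: 1\nevent: reasoning\ndata: "您好"\n\n'
```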
15 changes: 10 additions & 5 deletions src/libs/agent-runtime/utils/streams/openai.ts
@@ -92,13 +92,18 @@ export const transformOpenAIStream = (
return { data: item.delta.content, id: chunk.id, type: 'text' };
}

// DeepSeek reasoner puts its thinking into the reasoning_content field
// litellm does not set content = null while streaming reasoning content
if (
item.delta &&
'reasoning_content' in item.delta &&
typeof item.delta.reasoning_content === 'string'
) {
return { data: item.delta.reasoning_content, id: chunk.id, type: 'reasoning' };
}

// case: no content
if (item.delta && item.delta.content === null) {
// deepseek reasoner puts its thinking into the reasoning_content field
if ('reasoning_content' in item.delta && typeof item.delta.reasoning_content === 'string') {
return { data: item.delta.reasoning_content, id: chunk.id, type: 'reasoning' };
}

return { data: item.delta, id: chunk.id, type: 'data' };
}

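In summary, the change hoists the `reasoning_content` check out of the `content === null` branch: litellm leaves `content` undefined (rather than explicitly null) while it streams reasoning, so the old nested check never fired. The following is a minimal sketch of the resulting delta-to-event ordering; `Delta`, `StreamEvent`, and `mapDelta` are hypothetical names used only to illustrate the checks, not the library's actual implementation.

```ts
// Minimal sketch, for illustration only: shows the order in which a delta's
// fields are inspected after this change.
interface Delta {
  content?: string | null;
  reasoning_content?: string | null;
}

type StreamEvent =
  | { data: Delta; id: string; type: 'data' }
  | { data: string; id: string; type: 'reasoning' | 'text' };

const mapDelta = (id: string, delta: Delta): StreamEvent => {
  // Plain assistant text wins when present as a string.
  if (typeof delta.content === 'string') {
    return { data: delta.content, id, type: 'text' };
  }

  // Reasoning is now checked independently of `content`, since litellm leaves
  // `content` undefined (not null) while streaming reasoning_content.
  if (typeof delta.reasoning_content === 'string') {
    return { data: delta.reasoning_content, id, type: 'reasoning' };
  }

  // Fall back to passing the raw delta through.
  return { data: delta, id, type: 'data' };
};

// litellm-style chunk (no `content` field) -> reasoning event
console.log(mapDelta('1', { reasoning_content: '您好' }));
// official DeepSeek chunk (`content: null`) -> reasoning event
console.log(mapDelta('1', { content: null, reasoning_content: '!' }));
```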
