diff --git a/src/libs/agent-runtime/utils/streams/openai.test.ts b/src/libs/agent-runtime/utils/streams/openai.test.ts index 3a26ba3c3829a..fc056db56c64e 100644 --- a/src/libs/agent-runtime/utils/streams/openai.test.ts +++ b/src/libs/agent-runtime/utils/streams/openai.test.ts @@ -754,6 +754,7 @@ describe('OpenAIStream', () => { ].map((i) => `${i}\n`), ); }); + it('should handle reasoning in litellm', async () => { const data = [ { @@ -954,5 +955,206 @@ describe('OpenAIStream', () => { ].map((i) => `${i}\n`), ); }); + + it('should handle reasoning in siliconflow', async () => { + const data = [ + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { role: 'assistant', reasoning_content: '', content: '' }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { reasoning_content: '您好', content: '' }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { reasoning_content: '!', content: '' }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { content: '你好', reasoning_content: null }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { content: '很高兴', reasoning_cont: null }, + logprobs: null, + finish_reason: null, + }, 
+ ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { content: '为您', reasoning_content: null }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { content: '提供', reasoning_content: null }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { content: '帮助。', reasoning_content: null }, + logprobs: null, + finish_reason: null, + }, + ], + }, + { + id: '1', + object: 'chat.completion.chunk', + created: 1737563070, + model: 'deepseek-reasoner', + system_fingerprint: 'fp_1c5d8833bc', + choices: [ + { + index: 0, + delta: { content: '', reasoning_content: null }, + logprobs: null, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 6, + completion_tokens: 104, + total_tokens: 110, + prompt_tokens_details: { cached_tokens: 0 }, + completion_tokens_details: { reasoning_tokens: 70 }, + prompt_cache_hit_tokens: 0, + prompt_cache_miss_tokens: 6, + }, + }, + ]; + + const mockOpenAIStream = new ReadableStream({ + start(controller) { + data.forEach((chunk) => { + controller.enqueue(chunk); + }); + + controller.close(); + }, + }); + + const protocolStream = OpenAIStream(mockOpenAIStream); + + const decoder = new TextDecoder(); + const chunks = []; + + // @ts-ignore + for await (const chunk of protocolStream) { + chunks.push(decoder.decode(chunk, { stream: true })); + } + + expect(chunks).toEqual( + [ + 'id: 1', + 'event: reasoning', + `data: ""\n`, + 'id: 1', + 'event: reasoning', + `data: "您好"\n`, + 'id: 1', + 'event: reasoning', + `data: "!"\n`, + 'id: 1', + 'event: 
text', + `data: "你好"\n`, + 'id: 1', + 'event: text', + `data: "很高兴"\n`, + 'id: 1', + 'event: text', + `data: "为您"\n`, + 'id: 1', + 'event: text', + `data: "提供"\n`, + 'id: 1', + 'event: text', + `data: "帮助。"\n`, + 'id: 1', + 'event: stop', + `data: "stop"\n`, + ].map((i) => `${i}\n`), + ); + }); }); }); diff --git a/src/libs/agent-runtime/utils/streams/openai.ts b/src/libs/agent-runtime/utils/streams/openai.ts index 4edcd3303112e..3ec8b1f7f4543 100644 --- a/src/libs/agent-runtime/utils/streams/openai.ts +++ b/src/libs/agent-runtime/utils/streams/openai.ts @@ -37,9 +37,8 @@ export const transformOpenAIStream = ( return { data: errorData, id: 'first_chunk_error', type: 'error' }; } - // maybe need another structure to add support for multiple choices - try { + // maybe need another structure to add support for multiple choices const item = chunk.choices[0]; if (!item) { return { data: chunk, id: chunk.id, type: 'data' }; } @@ -88,12 +87,10 @@ export const transformOpenAIStream = ( return { data: item.finish_reason, id: chunk.id, type: 'stop' }; } - if (typeof item.delta?.content === 'string') { - return { data: item.delta.content, id: chunk.id, type: 'text' }; - } - - // DeepSeek reasoner 会将 thinking 放在 reasoning_content 字段中 - // litellm 处理 reasoning content 时 不会设定 content = null + // DeepSeek reasoner will put thinking in the reasoning_content field + // litellm will not set content = null when processing reasoning content + // siliconflow can emit chunks where both content and reasoning_content are present, so the reasoning_content check must run before the content check + // refs: https://github.com/lobehub/lobe-chat/issues/5681 if ( item.delta && 'reasoning_content' in item.delta && @@ -102,6 +99,10 @@ export const transformOpenAIStream = ( return { data: item.delta.reasoning_content, id: chunk.id, type: 'reasoning' }; } + if (typeof item.delta?.content === 'string') { + return { data: item.delta.content, id: chunk.id, type: 'text' }; + } + // 无内容情况 if (item.delta && 
item.delta.content === null) { return { data: item.delta, id: chunk.id, type: 'data' };