From 59509483a02c5a6a1c01f8ebf7f93bf7f7f9a183 Mon Sep 17 00:00:00 2001 From: JerryLee <223425819+Jerry2003826@users.noreply.github.com> Date: Mon, 11 May 2026 06:06:21 +1000 Subject: [PATCH] fix(deepseek): replay reasoning content for tool calls --- packages/openai-adapters/src/apis/DeepSeek.ts | 26 ++++++- .../openai-adapters/src/test/main.test.ts | 70 +++++++++++++++++++ 2 files changed, 95 insertions(+), 1 deletion(-) diff --git a/packages/openai-adapters/src/apis/DeepSeek.ts b/packages/openai-adapters/src/apis/DeepSeek.ts index 0beba18fee1..246be196ed8 100644 --- a/packages/openai-adapters/src/apis/DeepSeek.ts +++ b/packages/openai-adapters/src/apis/DeepSeek.ts @@ -1,5 +1,9 @@ import { streamSse } from "@continuedev/fetch"; -import { ChatCompletionChunk, Model } from "openai/resources/index"; +import { + ChatCompletionChunk, + ChatCompletionCreateParams, + Model, +} from "openai/resources/index"; import { DeepseekConfig } from "../types.js"; import { chatChunk, customFetch } from "../util.js"; import { OpenAIApi } from "./OpenAI.js"; @@ -15,6 +19,26 @@ export class DeepSeekApi extends OpenAIApi { }); } + modifyChatBody<T extends ChatCompletionCreateParams>(body: T): T { + const modifiedBody = super.modifyChatBody(body); + + modifiedBody.messages = modifiedBody.messages.map((message) => { + if ( + message.role !== "assistant" || + (message as any).reasoning_content !== undefined + ) { + return message; + } + + return { + ...message, + reasoning_content: "", + } as typeof message; + }) as T["messages"]; + + return modifiedBody; + } + async *fimStream( body: FimCreateParamsStreaming, signal: AbortSignal, diff --git a/packages/openai-adapters/src/test/main.test.ts b/packages/openai-adapters/src/test/main.test.ts index 8a10c8b727f..b90641630d4 100644 --- a/packages/openai-adapters/src/test/main.test.ts +++ b/packages/openai-adapters/src/test/main.test.ts @@ -235,6 +235,76 @@ describe("Configuration", () => { ); }); + it("should add empty DeepSeek reasoning_content when replaying assistant tool calls", 
() => { + const deepseek = constructLlmApi({ + provider: "deepseek", + apiKey: "sk-xxx", + }) as OpenAIApi; + + const body = deepseek.modifyChatBody({ + model: "deepseek-v4-flash", + stream: true, + messages: [ + { role: "user", content: "@src/file.ts please review this code" }, + { + role: "assistant", + content: "", + tool_calls: [ + { + id: "call_read", + type: "function", + function: { + name: "Read", + arguments: '{"filepath":"src/file.ts"}', + }, + }, + ], + }, + { + role: "tool", + tool_call_id: "call_read", + content: "file contents", + }, + ], + } as any); + + expect((body.messages[1] as any).reasoning_content).toBe(""); + }); + + it("should not overwrite existing DeepSeek reasoning_content", () => { + const deepseek = constructLlmApi({ + provider: "deepseek", + apiKey: "sk-xxx", + }) as OpenAIApi; + + const body = deepseek.modifyChatBody({ + model: "deepseek-v4-flash", + stream: true, + messages: [ + { role: "user", content: "Use a tool" }, + { + role: "assistant", + content: "", + reasoning_content: "I should inspect the requested file.", + tool_calls: [ + { + id: "call_read", + type: "function", + function: { + name: "Read", + arguments: '{"filepath":"src/file.ts"}', + }, + }, + ], + }, + ], + } as any); + + expect((body.messages[1] as any).reasoning_content).toBe( + "I should inspect the requested file.", + ); + }); + it("should configure Inception OpenAI client with correct apiBase and apiKey", () => { const inception = constructLlmApi({ provider: "inception",