Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 25 additions & 1 deletion packages/openai-adapters/src/apis/DeepSeek.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import { streamSse } from "@continuedev/fetch";
import { ChatCompletionChunk, Model } from "openai/resources/index";
import {
ChatCompletionChunk,
ChatCompletionCreateParams,
Model,
} from "openai/resources/index";
import { DeepseekConfig } from "../types.js";
import { chatChunk, customFetch } from "../util.js";
import { OpenAIApi } from "./OpenAI.js";
Expand All @@ -15,6 +19,26 @@ export class DeepSeekApi extends OpenAIApi {
});
}

modifyChatBody<T extends ChatCompletionCreateParams>(body: T): T {
  // Apply the base-class transformations first, then layer on the
  // DeepSeek-specific message patching below.
  const result = super.modifyChatBody(body);

  // Backfill an empty `reasoning_content` on assistant messages that lack
  // one. NOTE(review): presumably DeepSeek's API requires this field when
  // replaying assistant turns in the history — confirm against the
  // accompanying tests / DeepSeek docs. Messages that already carry a
  // `reasoning_content` value are left untouched.
  const patched = result.messages.map((message) => {
    const needsBackfill =
      message.role === "assistant" &&
      (message as any).reasoning_content === undefined;

    if (!needsBackfill) {
      return message;
    }

    return {
      ...message,
      reasoning_content: "",
    } as typeof message;
  });

  result.messages = patched as T["messages"];
  return result;
}

async *fimStream(
body: FimCreateParamsStreaming,
signal: AbortSignal,
Expand Down
70 changes: 70 additions & 0 deletions packages/openai-adapters/src/test/main.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,76 @@ describe("Configuration", () => {
);
});

it("should add empty DeepSeek reasoning_content when replaying assistant tool calls", () => {
  // Build a DeepSeek-backed client; the adapter is an OpenAIApi subclass.
  const api = constructLlmApi({
    provider: "deepseek",
    apiKey: "sk-xxx",
  }) as OpenAIApi;

  // History replaying an assistant tool call: the assistant message has
  // tool_calls but no reasoning_content of its own.
  const request: any = {
    model: "deepseek-v4-flash",
    stream: true,
    messages: [
      { role: "user", content: "@src/file.ts please review this code" },
      {
        role: "assistant",
        content: "",
        tool_calls: [
          {
            id: "call_read",
            type: "function",
            function: {
              name: "Read",
              arguments: '{"filepath":"src/file.ts"}',
            },
          },
        ],
      },
      {
        role: "tool",
        tool_call_id: "call_read",
        content: "file contents",
      },
    ],
  };

  const body = api.modifyChatBody(request);

  // The adapter must have backfilled an empty reasoning_content field.
  expect((body.messages[1] as any).reasoning_content).toBe("");
});

it("should not overwrite existing DeepSeek reasoning_content", () => {
  // Build a DeepSeek-backed client; the adapter is an OpenAIApi subclass.
  const api = constructLlmApi({
    provider: "deepseek",
    apiKey: "sk-xxx",
  }) as OpenAIApi;

  // This assistant message already carries reasoning_content — the
  // adapter must pass it through unchanged rather than blanking it.
  const existingReasoning = "I should inspect the requested file.";
  const request: any = {
    model: "deepseek-v4-flash",
    stream: true,
    messages: [
      { role: "user", content: "Use a tool" },
      {
        role: "assistant",
        content: "",
        reasoning_content: existingReasoning,
        tool_calls: [
          {
            id: "call_read",
            type: "function",
            function: {
              name: "Read",
              arguments: '{"filepath":"src/file.ts"}',
            },
          },
        ],
      },
    ],
  };

  const body = api.modifyChatBody(request);

  expect((body.messages[1] as any).reasoning_content).toBe(existingReasoning);
});

it("should configure Inception OpenAI client with correct apiBase and apiKey", () => {
const inception = constructLlmApi({
provider: "inception",
Expand Down
Loading