Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
122 changes: 122 additions & 0 deletions src/__tests__/reasoning-web-search.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -549,3 +549,125 @@ describe("POST /v1/messages (thinking blocks non-streaming)", () => {
expect(body.content[0].type).toBe("text");
});
});

// ─── Chat Completions: reasoning_content (OpenRouter format) ────────────────

/**
 * Minimal shape of a single OpenAI-style chat completion SSE chunk, covering
 * only the fields these tests inspect. `reasoning_content` is the
 * OpenRouter-format reasoning delta emitted alongside regular content.
 */
interface ChatCompletionChunk {
  id: string;
  object: string;
  created: number;
  model: string;
  choices: Array<{
    index: number;
    delta: {
      role?: string;
      content?: string | null;
      reasoning_content?: string;
    };
    finish_reason: string | null;
  }>;
}

/**
 * Parses an SSE response body into chat completion chunks.
 * Non-data lines (blank separators) and the terminal "[DONE]" sentinel
 * are skipped; every remaining data payload is JSON-decoded.
 */
function parseChatCompletionSSEChunks(body: string): ChatCompletionChunk[] {
  const dataPrefix = "data: ";
  return body
    .split("\n")
    .filter((line) => line.startsWith(dataPrefix))
    .map((line) => line.slice(dataPrefix.length))
    .filter((payload) => payload.trim() !== "[DONE]")
    .map((payload) => JSON.parse(payload) as ChatCompletionChunk);
}

describe("POST /v1/chat/completions (reasoning_content streaming)", () => {
it("emits reasoning_content deltas before content deltas", async () => {
instance = await createServer(allFixtures);
const res = await post(`${instance.url}/v1/chat/completions`, {
model: "gpt-4",
messages: [{ role: "user", content: "think" }],
stream: true,
});

expect(res.status).toBe(200);
const chunks = parseChatCompletionSSEChunks(res.body);

const reasoningChunks = chunks.filter((c) => c.choices[0]?.delta.reasoning_content);
const contentChunks = chunks.filter(
(c) => c.choices[0]?.delta.content && c.choices[0].delta.content.length > 0,
);

expect(reasoningChunks.length).toBeGreaterThan(0);
expect(contentChunks.length).toBeGreaterThan(0);

// All reasoning chunks appear before all content chunks
const lastReasoningIdx = chunks.lastIndexOf(reasoningChunks[reasoningChunks.length - 1]);
const firstContentIdx = chunks.indexOf(contentChunks[0]);
expect(lastReasoningIdx).toBeLessThan(firstContentIdx);
});

it("reasoning_content deltas reconstruct full reasoning text", async () => {
instance = await createServer(allFixtures);
const res = await post(`${instance.url}/v1/chat/completions`, {
model: "gpt-4",
messages: [{ role: "user", content: "think" }],
stream: true,
});

const chunks = parseChatCompletionSSEChunks(res.body);
const reasoning = chunks.map((c) => c.choices[0]?.delta.reasoning_content ?? "").join("");
expect(reasoning).toBe("Let me think step by step about this problem.");
});

it("content deltas still reconstruct full text", async () => {
instance = await createServer(allFixtures);
const res = await post(`${instance.url}/v1/chat/completions`, {
model: "gpt-4",
messages: [{ role: "user", content: "think" }],
stream: true,
});

const chunks = parseChatCompletionSSEChunks(res.body);
const content = chunks.map((c) => c.choices[0]?.delta.content ?? "").join("");
expect(content).toBe("The answer is 42.");
});

it("no reasoning_content when reasoning is absent", async () => {
instance = await createServer(allFixtures);
const res = await post(`${instance.url}/v1/chat/completions`, {
model: "gpt-4",
messages: [{ role: "user", content: "plain" }],
stream: true,
});

const chunks = parseChatCompletionSSEChunks(res.body);
const reasoningChunks = chunks.filter((c) => c.choices[0]?.delta.reasoning_content);
expect(reasoningChunks).toHaveLength(0);
});
});

describe("POST /v1/chat/completions (reasoning_content non-streaming)", () => {
  // Issue a non-streaming chat completion against the fixture server.
  const complete = async (prompt: string) => {
    instance = await createServer(allFixtures);
    return post(`${instance.url}/v1/chat/completions`, {
      model: "gpt-4",
      messages: [{ role: "user", content: prompt }],
      stream: false,
    });
  };

  it("includes reasoning_content in non-streaming response", async () => {
    const res = await complete("think");

    expect(res.status).toBe(200);
    const payload = JSON.parse(res.body);
    expect(payload.object).toBe("chat.completion");

    const message = payload.choices[0].message;
    expect(message.content).toBe("The answer is 42.");
    expect(message.reasoning_content).toBe(
      "Let me think step by step about this problem.",
    );
  });

  it("no reasoning_content when reasoning is absent", async () => {
    const res = await complete("plain");

    const message = JSON.parse(res.body).choices[0].message;
    expect(message.content).toBe("Just plain text.");
    expect(message.reasoning_content).toBeUndefined();
  });
});
32 changes: 32 additions & 0 deletions src/__tests__/stream-collapse.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1792,3 +1792,35 @@ describe("collapseAnthropicSSE with thinking", () => {
expect(result.reasoning).toBeUndefined();
});
});

describe("collapseOpenAISSE with chat completions reasoning_content", () => {
  // Render fixture events as an SSE body: each event (and the terminal
  // "[DONE]" sentinel) is a `data:` line followed by a blank separator line.
  const sseBody = (events: object[]): string =>
    [...events.map((event) => `data: ${JSON.stringify(event)}`), "data: [DONE]"]
      .flatMap((line) => [line, ""])
      .join("\n");

  it("extracts reasoning from reasoning_content delta fields", () => {
    const body = sseBody([
      { id: "chatcmpl-1", choices: [{ delta: { reasoning_content: "Let me " } }] },
      { id: "chatcmpl-1", choices: [{ delta: { reasoning_content: "think." } }] },
      { id: "chatcmpl-1", choices: [{ delta: { content: "Answer" } }] },
    ]);

    const result = collapseOpenAISSE(body);
    expect(result.content).toBe("Answer");
    expect(result.reasoning).toBe("Let me think.");
  });

  it("handles reasoning_content without regular content", () => {
    const body = sseBody([
      { id: "chatcmpl-2", choices: [{ delta: { reasoning_content: "Thinking only" } }] },
    ]);

    const result = collapseOpenAISSE(body);
    expect(result.reasoning).toBe("Thinking only");
    expect(result.content).toBe("");
  });
});
20 changes: 10 additions & 10 deletions src/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,16 +72,7 @@ export function buildTextChunks(
const created = Math.floor(Date.now() / 1000);
const chunks: SSEChunk[] = [];

// Role chunk
chunks.push({
id,
object: "chat.completion.chunk",
created,
model,
choices: [{ index: 0, delta: { role: "assistant", content: "" }, finish_reason: null }],
});

// Reasoning chunks (emitted before content chunks)
// Reasoning chunks (emitted before content, OpenRouter format)
if (reasoning) {
for (let i = 0; i < reasoning.length; i += chunkSize) {
const slice = reasoning.slice(i, i + chunkSize);
Expand All @@ -95,6 +86,15 @@ export function buildTextChunks(
}
}

// Role chunk
chunks.push({
id,
object: "chat.completion.chunk",
created,
model,
choices: [{ index: 0, delta: { role: "assistant", content: "" }, finish_reason: null }],
});

// Content chunks
for (let i = 0; i < content.length; i += chunkSize) {
const slice = content.slice(i, i + chunkSize);
Expand Down
5 changes: 5 additions & 0 deletions src/stream-collapse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,11 @@ export function collapseOpenAISSE(body: string): CollapseResult {
const delta = choices[0].delta as Record<string, unknown> | undefined;
if (!delta) continue;

// Reasoning content (OpenRouter / chat completions format)
if (typeof delta.reasoning_content === "string") {
reasoning += delta.reasoning_content;
}

// Text content
if (typeof delta.content === "string") {
content += delta.content;
Expand Down
Loading