Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 35 additions & 1 deletion src/app/v1/_lib/codex/chat-completions-handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,32 @@ import { ProxyStatusTracker } from "@/lib/proxy-status-tracker";
import { SessionTracker } from "@/lib/session-tracker";
import type { ChatCompletionRequest } from "./types/compatible";

function normalizeResponseInput(request: Record<string, unknown>): void {
if (!("input" in request)) return;

const input = request.input;

// OpenAI Responses API supports a string shortcut:
// { model, input: "hello" } -> [{ role: "user", content: [{ type: "input_text", text: "hello" }] }]
if (typeof input === "string") {
const text = input.trim();
request.input = text.length
? [
{
role: "user",
content: [{ type: "input_text", text }],
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

In the normalized input, you're using type: "input_text". However, other parts of the codebase, like the existing test case Response(input) 请求成功路径必须执行全链路 guards/filters 再 forward/dispatch in tests/unit/proxy/chat-completions-handler-guard-pipeline.test.ts, use type: "text". To maintain consistency and avoid potential issues in downstream processing, it would be better to use "text" here as well.

Suggested change
content: [{ type: "input_text", text }],
content: [{ type: "text", text }],

},
]
: [];
Comment on lines +31 to +39
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Consider whether whitespace-only input (e.g., " ") should be rejected with a validation error instead of silently converting to empty array []

Note: If this suggestion doesn't match your team's coding style, reply to this and let me know. I'll remember it for next time!

Prompt To Fix With AI
This is a comment left during a code review.
Path: src/app/v1/_lib/codex/chat-completions-handler.ts
Line: 31:39

Comment:
Consider whether whitespace-only input (e.g., `"   "`) should be rejected with a validation error instead of silently converting to empty array `[]`

<sub>Note: If this suggestion doesn't match your team's coding style, reply to this and let me know. I'll remember it for next time!</sub>

How can I resolve this? If you propose a fix, please make it concise.

return;
}

// Some clients may send a single object instead of an array. Wrap it for compatibility.
if (input && typeof input === "object" && !Array.isArray(input)) {
request.input = [input];
}
}

/**
* 处理 OpenAI Compatible API 请求 (/v1/chat/completions)
*
Expand All @@ -43,7 +69,12 @@ export async function handleChatCompletions(c: Context): Promise<Response> {

// 格式检测
const isOpenAIFormat = "messages" in request && Array.isArray(request.messages);
const isResponseAPIFormat = "input" in request && Array.isArray(request.input);
const inputValue = (request as Record<string, unknown>).input;
const isResponseAPIFormat =
"input" in request &&
(Array.isArray(inputValue) ||
typeof inputValue === "string" ||
(typeof inputValue === "object" && inputValue !== null));

if (!isOpenAIFormat && !isResponseAPIFormat) {
const response = new Response(
Expand Down Expand Up @@ -158,6 +189,9 @@ export async function handleChatCompletions(c: Context): Promise<Response> {
);
return await attachSessionIdToErrorResponse(session.sessionId, response);
}

// Normalize for downstream guards/filters.
normalizeResponseInput(request as Record<string, unknown>);
}

const type = session.isCountTokensRequest() ? RequestType.COUNT_TOKENS : RequestType.CHAT;
Expand Down
39 changes: 39 additions & 0 deletions tests/unit/proxy/chat-completions-handler-guard-pipeline.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,45 @@ describe("handleChatCompletions:必须走 GuardPipeline", () => {
]);
});

// Verifies the OpenAI Responses API string shortcut: a plain string `input`
// must be normalized into the canonical array-of-message form, and the
// request must still traverse the complete guard/filter pipeline.
test("Response(input) 支持 input 为 string(OpenAI shortcut)", async () => {
  h.session = createSession({
    model: "gpt-4.1-mini",
    input: "hi", // string shortcut — handler should rewrite this to an array
    stream: false,
  });

  const { handleChatCompletions } = await import("@/app/v1/_lib/codex/chat-completions-handler");
  const res = await handleChatCompletions({} as any);

  // Request succeeds and is classified as Responses-API ("response") format
  // even though `input` was a string rather than an array.
  expect(res.status).toBe(200);
  expect(h.session.originalFormat).toBe("response");
  // String "hi" is rewritten to the normalized array form with an
  // `input_text` content part.
  expect((h.session.request.message as any).input).toEqual([
    {
      role: "user",
      content: [{ type: "input_text", text: "hi" }],
    },
  ]);
  // Normalization must not short-circuit any pipeline stage: the full
  // guard/filter chain runs in its fixed order.
  expect(h.callOrder).toEqual([
    "auth",
    "sensitive",
    "client",
    "model",
    "version",
    "probe",
    "session",
    "warmup",
    "requestFilter",
    "rateLimit",
    "provider",
    "providerRequestFilter",
    "messageContext",
    "concurrencyInc",
    "forward",
    "dispatch",
    "concurrencyDec",
  ]);
});

test("当 sessionId 未分配时,不应进行并发计数(覆盖分支)", async () => {
h.assignSessionId = false;
h.session = createSession({
Expand Down
Loading