fix(gateway): tighten openai-http edge handling

This commit is contained in:
Peter Steinberger
2026-02-22 11:28:55 +00:00
parent 05358173da
commit 0f989d3109
3 changed files with 258 additions and 222 deletions

View File

@@ -334,6 +334,21 @@ describe("OpenAI-compatible HTTP API (e2e)", () => {
expect(msg.content).toBe("hello"); expect(msg.content).toBe("hello");
} }
// Non-stream edge case: the agent resolves with an empty text payload.
// The endpoint should still return HTTP 200 and substitute a fallback
// message instead of delivering an empty assistant message.
{
agentCommand.mockClear();
// Simulate a run that produced no assistant output at all.
agentCommand.mockResolvedValueOnce({ payloads: [{ text: "" }] } as never);
const res = await postChatCompletions(port, {
stream: false,
model: "openclaw",
messages: [{ role: "user", content: "hi" }],
});
expect(res.status).toBe(200);
const json = (await res.json()) as Record<string, unknown>;
const choice0 = (json.choices as Array<Record<string, unknown>>)[0] ?? {};
const msg = (choice0.message as Record<string, unknown> | undefined) ?? {};
// Fallback text used by the handler when the agent returns no content.
expect(msg.content).toBe("No response from OpenClaw.");
}
{ {
const res = await postChatCompletions(port, { const res = await postChatCompletions(port, {
model: "openclaw", model: "openclaw",
@@ -475,6 +490,31 @@ describe("OpenAI-compatible HTTP API (e2e)", () => {
expect(fallbackText).toContain("[DONE]"); expect(fallbackText).toContain("[DONE]");
expect(fallbackText).toContain("hello"); expect(fallbackText).toContain("hello");
} }
// Streaming edge case: the agent rejects mid-run. The SSE stream must still
// terminate gracefully — a final chunk with finish_reason "stop" carrying a
// generic (non-leaking) error message, followed by the [DONE] sentinel.
{
agentCommand.mockClear();
agentCommand.mockRejectedValueOnce(new Error("boom"));
const errorRes = await postChatCompletions(port, {
stream: true,
model: "openclaw",
messages: [{ role: "user", content: "hi" }],
});
// Errors surface inside the stream body, not as an HTTP error status.
expect(errorRes.status).toBe(200);
const errorText = await errorRes.text();
const errorData = parseSseDataLines(errorText);
expect(errorData[errorData.length - 1]).toBe("[DONE]");
const errorChunks = errorData
.filter((d) => d !== "[DONE]")
.map((d) => JSON.parse(d) as Record<string, unknown>);
// Find the terminating choice and confirm the sanitized error content —
// the original "boom" message must not be forwarded to the client.
const stopChoice = errorChunks
.flatMap((c) => (c.choices as Array<Record<string, unknown>> | undefined) ?? [])
.find((choice) => choice.finish_reason === "stop");
expect((stopChoice?.delta as Record<string, unknown> | undefined)?.content).toBe(
"Error: internal error",
);
}
} finally { } finally {
// shared server // shared server
} }

View File

@@ -41,6 +41,51 @@ function writeSse(res: ServerResponse, data: unknown) {
res.write(`data: ${JSON.stringify(data)}\n\n`); res.write(`data: ${JSON.stringify(data)}\n\n`);
} }
/**
 * Builds the shared input object handed to `agentCommand` by both the
 * streaming and non-streaming chat-completion paths, so the two call sites
 * cannot drift apart.
 */
function buildAgentCommandInput(params: {
prompt: { message: string; extraSystemPrompt?: string };
sessionKey: string;
runId: string;
}) {
const { prompt, sessionKey, runId } = params;
return {
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
// The gateway delivers output itself over SSE/JSON, so the agent must
// not deliver through a message channel.
deliver: false as const,
messageChannel: "webchat" as const,
bestEffortDeliver: false as const,
};
}
/**
 * Emits the initial SSE chunk announcing the assistant role — the first
 * delta of an OpenAI-style `chat.completion.chunk` stream.
 */
function writeAssistantRoleChunk(res: ServerResponse, params: { runId: string; model: string }) {
const { runId, model } = params;
const chunk = {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
};
writeSse(res, chunk);
}
/**
 * Emits one assistant content delta as an OpenAI-style
 * `chat.completion.chunk` SSE event. Pass finishReason "stop" for the
 * terminating chunk, or null for an intermediate delta.
 */
function writeAssistantContentChunk(
res: ServerResponse,
params: { runId: string; model: string; content: string; finishReason: "stop" | null },
) {
const { runId, model, content, finishReason } = params;
const choice = {
index: 0,
delta: { content },
finish_reason: finishReason,
};
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [choice],
});
}
function asMessages(val: unknown): OpenAiChatMessage[] { function asMessages(val: unknown): OpenAiChatMessage[] {
return Array.isArray(val) ? (val as OpenAiChatMessage[]) : []; return Array.isArray(val) ? (val as OpenAiChatMessage[]) : [];
} }
@@ -194,22 +239,15 @@ export async function handleOpenAiHttpRequest(
const runId = `chatcmpl_${randomUUID()}`; const runId = `chatcmpl_${randomUUID()}`;
const deps = createDefaultDeps(); const deps = createDefaultDeps();
const commandInput = buildAgentCommandInput({
prompt,
sessionKey,
runId,
});
if (!stream) { if (!stream) {
try { try {
const result = await agentCommand( const result = await agentCommand(commandInput, defaultRuntime, deps);
{
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
deliver: false,
messageChannel: "webchat",
bestEffortDeliver: false,
},
defaultRuntime,
deps,
);
const content = resolveAgentResponseText(result); const content = resolveAgentResponseText(result);
@@ -258,28 +296,15 @@ export async function handleOpenAiHttpRequest(
if (!wroteRole) { if (!wroteRole) {
wroteRole = true; wroteRole = true;
writeSse(res, { writeAssistantRoleChunk(res, { runId, model });
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
});
} }
sawAssistantDelta = true; sawAssistantDelta = true;
writeSse(res, { writeAssistantContentChunk(res, {
id: runId, runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model, model,
choices: [ content,
{ finishReason: null,
index: 0,
delta: { content },
finish_reason: null,
},
],
}); });
return; return;
} }
@@ -302,19 +327,7 @@ export async function handleOpenAiHttpRequest(
void (async () => { void (async () => {
try { try {
const result = await agentCommand( const result = await agentCommand(commandInput, defaultRuntime, deps);
{
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
deliver: false,
messageChannel: "webchat",
bestEffortDeliver: false,
},
defaultRuntime,
deps,
);
if (closed) { if (closed) {
return; return;
@@ -323,30 +336,17 @@ export async function handleOpenAiHttpRequest(
if (!sawAssistantDelta) { if (!sawAssistantDelta) {
if (!wroteRole) { if (!wroteRole) {
wroteRole = true; wroteRole = true;
writeSse(res, { writeAssistantRoleChunk(res, { runId, model });
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
});
} }
const content = resolveAgentResponseText(result); const content = resolveAgentResponseText(result);
sawAssistantDelta = true; sawAssistantDelta = true;
writeSse(res, { writeAssistantContentChunk(res, {
id: runId, runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model, model,
choices: [ content,
{ finishReason: null,
index: 0,
delta: { content },
finish_reason: null,
},
],
}); });
} }
} catch (err) { } catch (err) {
@@ -354,18 +354,11 @@ export async function handleOpenAiHttpRequest(
if (closed) { if (closed) {
return; return;
} }
writeSse(res, { writeAssistantContentChunk(res, {
id: runId, runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model, model,
choices: [ content: "Error: internal error",
{ finishReason: "stop",
index: 0,
delta: { content: "Error: internal error" },
finish_reason: "stop",
},
],
}); });
emitAgentEvent({ emitAgentEvent({
runId, runId,

View File

@@ -81,7 +81,106 @@ const whatsappRegistry = createRegistry([
]); ]);
const emptyRegistry = createRegistry([]); const emptyRegistry = createRegistry([]);
// Shape of one catalog entry as returned by the gateway's `models.list` RPC.
type ModelCatalogRpcEntry = {
id: string;
name: string;
provider: string;
contextWindow?: number;
};
// Shape of one mock entry fed into the pi SDK discovery fixture.
// `name` is optional here — the tests below expect entries without it to
// fall back to `id` in the RPC output.
type PiCatalogFixtureEntry = {
id: string;
provider: string;
name?: string;
contextWindow?: number;
};
/**
 * Deliberately unsorted model fixture for the pi SDK discovery mock.
 * Covers the interesting shapes: a zero contextWindow, a missing `name`,
 * and two providers, so sorting/fallback behavior can be asserted.
 */
const buildPiCatalogFixture = (): PiCatalogFixtureEntry[] => {
const openaiNoName = { id: "gpt-test-z", provider: "openai", contextWindow: 0 };
const openaiNamed = { id: "gpt-test-a", name: "A-Model", provider: "openai", contextWindow: 8000 };
const anthropicB = { id: "claude-test-b", name: "B-Model", provider: "anthropic", contextWindow: 1000 };
const anthropicA = { id: "claude-test-a", name: "A-Model", provider: "anthropic", contextWindow: 200_000 };
return [openaiNoName, openaiNamed, anthropicB, anthropicA];
};
/**
 * The buildPiCatalogFixture() catalog as the RPC layer is expected to
 * return it: grouped/sorted by provider then name, a missing fixture
 * `name` falling back to the model id, and the zero contextWindow omitted.
 */
const expectedSortedCatalog = (): ModelCatalogRpcEntry[] => {
const anthropic = [
{ id: "claude-test-a", name: "A-Model", provider: "anthropic", contextWindow: 200_000 },
{ id: "claude-test-b", name: "B-Model", provider: "anthropic", contextWindow: 1000 },
];
const openai = [
{ id: "gpt-test-a", name: "A-Model", provider: "openai", contextWindow: 8000 },
{ id: "gpt-test-z", name: "gpt-test-z", provider: "openai" },
];
return [...anthropic, ...openai];
};
describe("gateway server models + voicewake", () => { describe("gateway server models + voicewake", () => {
// Issues a `models.list` RPC over the shared websocket and returns the reply.
const listModels = async () => rpcReq<{ models: ModelCatalogRpcEntry[] }>(ws, "models.list");
// Enables the pi SDK mock and seeds it with the shared model fixture.
const seedPiCatalog = () => {
piSdkMock.enabled = true;
piSdkMock.models = buildPiCatalogFixture();
};
/**
 * Runs `run` with the gateway config file temporarily replaced by `config`
 * (written as pretty-printed JSON), then restores the original file — or
 * removes it if none existed — and clears the config cache both ways.
 */
const withModelsConfig = async <T>(config: unknown, run: () => Promise<T>): Promise<T> => {
const configPath = process.env.OPENCLAW_CONFIG_PATH;
if (!configPath) {
throw new Error("Missing OPENCLAW_CONFIG_PATH");
}
// Snapshot the current file, tolerating its absence.
let savedConfig: string | undefined;
try {
savedConfig = await fs.readFile(configPath, "utf-8");
} catch (err) {
if ((err as NodeJS.ErrnoException | undefined)?.code !== "ENOENT") {
throw err;
}
}
try {
await fs.mkdir(path.dirname(configPath), { recursive: true });
await fs.writeFile(configPath, JSON.stringify(config, null, 2), "utf-8");
clearConfigCache();
return await run();
} finally {
// Leave the filesystem exactly as found.
if (savedConfig !== undefined) {
await fs.writeFile(configPath, savedConfig, "utf-8");
} else {
await fs.rm(configPath, { force: true });
}
clearConfigCache();
}
};
const withTempHome = async <T>(fn: (homeDir: string) => Promise<T>): Promise<T> => { const withTempHome = async <T>(fn: (homeDir: string) => Promise<T>): Promise<T> => {
const tempHome = await createTempHomeEnv("openclaw-home-"); const tempHome = await createTempHomeEnv("openclaw-home-");
try { try {
@@ -178,171 +277,75 @@ describe("gateway server models + voicewake", () => {
}); });
test("models.list returns model catalog", async () => { test("models.list returns model catalog", async () => {
piSdkMock.enabled = true; seedPiCatalog();
piSdkMock.models = [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
const res1 = await rpcReq<{ const res1 = await listModels();
models: Array<{ const res2 = await listModels();
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
const res2 = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
expect(res1.ok).toBe(true); expect(res1.ok).toBe(true);
expect(res2.ok).toBe(true); expect(res2.ok).toBe(true);
const models = res1.payload?.models ?? []; const models = res1.payload?.models ?? [];
expect(models).toEqual([ expect(models).toEqual(expectedSortedCatalog());
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
expect(piSdkMock.discoverCalls).toBe(1); expect(piSdkMock.discoverCalls).toBe(1);
}); });
test("models.list filters to allowlisted configured models by default", async () => { test("models.list filters to allowlisted configured models by default", async () => {
const configPath = process.env.OPENCLAW_CONFIG_PATH; await withModelsConfig(
if (!configPath) { {
throw new Error("Missing OPENCLAW_CONFIG_PATH"); agents: {
} defaults: {
let previousConfig: string | undefined; model: { primary: "openai/gpt-test-z" },
try { models: {
previousConfig = await fs.readFile(configPath, "utf-8"); "openai/gpt-test-z": {},
} catch (err) { "anthropic/claude-test-a": {},
const code = (err as NodeJS.ErrnoException | undefined)?.code;
if (code !== "ENOENT") {
throw err;
}
}
try {
await fs.mkdir(path.dirname(configPath), { recursive: true });
await fs.writeFile(
configPath,
JSON.stringify(
{
agents: {
defaults: {
model: { primary: "openai/gpt-test-z" },
models: {
"openai/gpt-test-z": {},
"anthropic/claude-test-a": {},
},
},
}, },
}, },
null, },
2, },
), async () => {
"utf-8", seedPiCatalog();
); const res = await listModels();
clearConfigCache();
piSdkMock.enabled = true; expect(res.ok).toBe(true);
piSdkMock.models = [ expect(res.payload?.models).toEqual([
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 }, {
{ id: "claude-test-a",
id: "gpt-test-a", name: "A-Model",
name: "A-Model", provider: "anthropic",
provider: "openai", contextWindow: 200_000,
contextWindow: 8000, },
}, {
{ id: "gpt-test-z",
id: "claude-test-b", name: "gpt-test-z",
name: "B-Model", provider: "openai",
provider: "anthropic", },
contextWindow: 1000, ]);
}, },
{ );
id: "claude-test-a", });
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
const res = await rpcReq<{ test("models.list falls back to full catalog when allowlist has no catalog match", async () => {
models: Array<{ await withModelsConfig(
id: string; {
name: string; agents: {
provider: string; defaults: {
contextWindow?: number; model: { primary: "openai/not-in-catalog" },
}>; models: {
}>(ws, "models.list"); "openai/not-in-catalog": {},
},
},
},
},
async () => {
seedPiCatalog();
const res = await listModels();
expect(res.ok).toBe(true); expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual([ expect(res.payload?.models).toEqual(expectedSortedCatalog());
{ },
id: "claude-test-a", );
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
} finally {
if (previousConfig === undefined) {
await fs.rm(configPath, { force: true });
} else {
await fs.writeFile(configPath, previousConfig, "utf-8");
}
clearConfigCache();
}
}); });
test("models.list rejects unknown params", async () => { test("models.list rejects unknown params", async () => {