feat(agents): default codex transport to websocket-first

This commit is contained in:
Peter Steinberger
2026-02-26 16:21:34 +01:00
parent 63c6080d50
commit 03d7641b0e
5 changed files with 204 additions and 0 deletions

View File

@@ -490,6 +490,160 @@ describe("applyExtraParamsToAgent", () => {
});
});
it("passes configured websocket transport through stream options", () => {
  // A transport of "websocket" configured on the Codex model should be
  // forwarded verbatim into the captured stream options.
  const { calls, agent } = createOptionsCaptureAgent();
  const cfg = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "websocket" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(agent, cfg, "openai-codex", "gpt-5.3-codex");
  const context: Context = { messages: [] };
  const model = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  void agent.streamFn?.(model, context, {});
  expect(calls).toHaveLength(1);
  expect(calls[0]?.transport).toBe("websocket");
});
it("defaults Codex transport to auto (WebSocket-first)", () => {
  // With no config at all, the Codex wrapper should still inject
  // transport "auto" into the stream options.
  const { calls, agent } = createOptionsCaptureAgent();
  applyExtraParamsToAgent(agent, undefined, "openai-codex", "gpt-5.3-codex");
  const emptyContext: Context = { messages: [] };
  const codexModel = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  void agent.streamFn?.(codexModel, emptyContext, {});
  expect(calls).toHaveLength(1);
  expect(calls[0]?.transport).toBe("auto");
});
it("does not set transport defaults for non-Codex providers", () => {
  // A plain OpenAI provider gets no Codex transport wrapper, so the
  // options reach the stream function with transport left unset.
  const { calls, agent } = createOptionsCaptureAgent();
  applyExtraParamsToAgent(agent, undefined, "openai", "gpt-5");
  const context: Context = { messages: [] };
  const model = {
    api: "openai-responses",
    provider: "openai",
    id: "gpt-5",
  } as Model<"openai-responses">;
  void agent.streamFn?.(model, context, {});
  expect(calls).toHaveLength(1);
  expect(calls[0]?.transport).toBeUndefined();
});
it("allows forcing Codex transport to SSE", () => {
  // An explicit transport: "sse" in config must override the Codex
  // WebSocket-first default.
  const { calls, agent } = createOptionsCaptureAgent();
  const cfg = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "sse" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(agent, cfg, "openai-codex", "gpt-5.3-codex");
  const context: Context = { messages: [] };
  const model = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  void agent.streamFn?.(model, context, {});
  expect(calls).toHaveLength(1);
  expect(calls[0]?.transport).toBe("sse");
});
it("lets runtime options override configured transport", () => {
  // Config asks for websocket, but the per-call stream options ask for
  // sse — the per-call value must win.
  const { calls, agent } = createOptionsCaptureAgent();
  const cfg = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "websocket" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(agent, cfg, "openai-codex", "gpt-5.3-codex");
  const context: Context = { messages: [] };
  const model = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  void agent.streamFn?.(model, context, { transport: "sse" });
  expect(calls).toHaveLength(1);
  expect(calls[0]?.transport).toBe("sse");
});
it("falls back to Codex default transport when configured value is invalid", () => {
  // "udp" is not a recognized transport, so the param is discarded and
  // the Codex wrapper's "auto" default applies instead.
  const { calls, agent } = createOptionsCaptureAgent();
  const cfg = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "udp" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(agent, cfg, "openai-codex", "gpt-5.3-codex");
  const context: Context = { messages: [] };
  const model = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  void agent.streamFn?.(model, context, {});
  expect(calls).toHaveLength(1);
  expect(calls[0]?.transport).toBe("auto");
});
it("disables prompt caching for non-Anthropic Bedrock models", () => {
const { calls, agent } = createOptionsCaptureAgent();

View File

@@ -117,6 +117,13 @@ function createStreamFnWithExtraParams(
if (typeof extraParams.maxTokens === "number") {
streamParams.maxTokens = extraParams.maxTokens;
}
const transport = extraParams.transport;
if (transport === "sse" || transport === "websocket" || transport === "auto") {
streamParams.transport = transport;
} else if (transport != null) {
const transportSummary = typeof transport === "string" ? transport : typeof transport;
log.warn(`ignoring invalid transport param: ${transportSummary}`);
}
const cacheRetention = resolveCacheRetention(extraParams, provider);
if (cacheRetention) {
streamParams.cacheRetention = cacheRetention;
@@ -234,6 +241,15 @@ function createOpenAIResponsesStoreWrapper(baseStreamFn: StreamFn | undefined):
};
}
function createCodexDefaultTransportWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
  // Falls back to streamSimple when no base stream function is supplied.
  const delegate = baseStreamFn ?? streamSimple;
  return (model, context, options) => {
    // Keep any caller-specified transport; only fill in "auto" when the
    // caller did not specify one (or passed no options at all).
    const mergedOptions = {
      ...options,
      transport: options?.transport ?? "auto",
    };
    return delegate(model, context, mergedOptions);
  };
}
function isAnthropic1MModel(modelId: string): boolean {
const normalized = modelId.trim().toLowerCase();
return ANTHROPIC_1M_MODEL_PREFIXES.some((prefix) => normalized.startsWith(prefix));
@@ -652,6 +668,10 @@ export function applyExtraParamsToAgent(
modelId,
agentId,
});
if (provider === "openai-codex") {
// Default Codex to WebSocket-first when nothing else specifies transport.
agent.streamFn = createCodexDefaultTransportWrapper(agent.streamFn);
}
const override =
extraParamsOverride && Object.keys(extraParamsOverride).length > 0
? Object.fromEntries(