diff --git a/src/agents/pi-embedded-runner-extraparams.e2e.test.ts b/src/agents/pi-embedded-runner-extraparams.e2e.test.ts index 2053a87d668..f2f6af57d20 100644 --- a/src/agents/pi-embedded-runner-extraparams.e2e.test.ts +++ b/src/agents/pi-embedded-runner-extraparams.e2e.test.ts @@ -91,4 +91,50 @@ describe("applyExtraParamsToAgent", () => { "X-Custom": "1", }); }); + + it("forces store=true for direct OpenAI Responses payloads", () => { + const payload = { store: false }; + const baseStreamFn: StreamFn = (_model, _context, options) => { + options?.onPayload?.(payload); + return new AssistantMessageEventStream(); + }; + const agent = { streamFn: baseStreamFn }; + + applyExtraParamsToAgent(agent, undefined, "openai", "gpt-5"); + + const model = { + api: "openai-responses", + provider: "openai", + id: "gpt-5", + baseUrl: "https://api.openai.com/v1", + } as Model<"openai-responses">; + const context: Context = { messages: [] }; + + void agent.streamFn?.(model, context, {}); + + expect(payload.store).toBe(true); + }); + + it("does not force store for OpenAI Responses routed through non-OpenAI base URLs", () => { + const payload = { store: false }; + const baseStreamFn: StreamFn = (_model, _context, options) => { + options?.onPayload?.(payload); + return new AssistantMessageEventStream(); + }; + const agent = { streamFn: baseStreamFn }; + + applyExtraParamsToAgent(agent, undefined, "openai", "gpt-5"); + + const model = { + api: "openai-responses", + provider: "openai", + id: "gpt-5", + baseUrl: "https://proxy.example.com/v1", + } as Model<"openai-responses">; + const context: Context = { messages: [] }; + + void agent.streamFn?.(model, context, {}); + + expect(payload.store).toBe(false); + }); }); diff --git a/src/agents/pi-embedded-runner/extra-params.ts b/src/agents/pi-embedded-runner/extra-params.ts index fdfbaa47c21..61734cd0bad 100644 --- a/src/agents/pi-embedded-runner/extra-params.ts +++ b/src/agents/pi-embedded-runner/extra-params.ts @@ -8,6 +8,8 @@ const 
OPENROUTER_APP_HEADERS: Record<string, string> = { "HTTP-Referer": "https://openclaw.ai", "X-Title": "OpenClaw", }; +const OPENAI_RESPONSES_APIS = new Set(["openai-responses", "openai-codex-responses"]); +const OPENAI_RESPONSES_PROVIDERS = new Set(["openai", "openai-codex"]); /** * Resolve provider-specific extra params from model config. @@ -101,6 +103,57 @@ function createStreamFnWithExtraParams( return wrappedStreamFn; } +function isDirectOpenAIBaseUrl(baseUrl: unknown): boolean { + if (typeof baseUrl !== "string" || !baseUrl.trim()) { + return true; + } + + try { + const host = new URL(baseUrl).hostname.toLowerCase(); + return host === "api.openai.com" || host === "chatgpt.com"; + } catch { + const normalized = baseUrl.toLowerCase(); + return normalized.includes("api.openai.com") || normalized.includes("chatgpt.com"); + } +} + +function shouldForceResponsesStore(model: { + api?: unknown; + provider?: unknown; + baseUrl?: unknown; +}): boolean { + if (typeof model.api !== "string" || typeof model.provider !== "string") { + return false; + } + if (!OPENAI_RESPONSES_APIS.has(model.api)) { + return false; + } + if (!OPENAI_RESPONSES_PROVIDERS.has(model.provider)) { + return false; + } + return isDirectOpenAIBaseUrl(model.baseUrl); +} + +function createOpenAIResponsesStoreWrapper(baseStreamFn: StreamFn | undefined): StreamFn { + const underlying = baseStreamFn ?? streamSimple; + return (model, context, options) => { + if (!shouldForceResponsesStore(model)) { + return underlying(model, context, options); + } + + const originalOnPayload = options?.onPayload; + return underlying(model, context, { + ...options, + onPayload: (payload) => { + if (payload && typeof payload === "object") { + (payload as { store?: unknown }).store = true; + } + originalOnPayload?.(payload); + }, + }); + }; +} + /** * Create a streamFn wrapper that adds OpenRouter app attribution headers. + * These headers allow OpenClaw to appear on OpenRouter's leaderboard. 
@@ -153,4 +206,9 @@ export function applyExtraParamsToAgent( log.debug(`applying OpenRouter app attribution headers for ${provider}/${modelId}`); agent.streamFn = createOpenRouterHeadersWrapper(agent.streamFn); } + + // Work around upstream pi-ai hardcoding `store: false` for Responses API. + // Force `store=true` for direct OpenAI/OpenAI Codex providers so multi-turn + // server-side conversation state is preserved. + agent.streamFn = createOpenAIResponsesStoreWrapper(agent.streamFn); }