Agents: allow gpt-5.3-codex-spark in fallback and thinking (#14990)

* Agents: allow gpt-5.3-codex-spark in fallback and thinking

* Fix: model picker issue for openai-codex/gpt-5.3-codex-spark

Fixed the model picker issue affecting openai-codex/gpt-5.3-codex-spark.
This commit is contained in:
Lucky
2026-02-13 12:39:22 +01:00
committed by GitHub
parent 417509c539
commit e3cb2564d7
8 changed files with 122 additions and 2 deletions

View File

@@ -14,6 +14,7 @@ const CODEX_MODELS = [
"gpt-5.2",
"gpt-5.2-codex",
"gpt-5.3-codex",
"gpt-5.3-codex-spark",
"gpt-5.1-codex",
"gpt-5.1-codex-mini",
"gpt-5.1-codex-max",

View File

@@ -84,4 +84,43 @@ describe("loadModelCatalog", () => {
expect(result).toEqual([{ id: "gpt-4.1", name: "GPT-4.1", provider: "openai" }]);
expect(warnSpy).toHaveBeenCalledTimes(1);
});
// Regression test: the catalog loader must synthesize an
// openai-codex/gpt-5.3-codex-spark entry whenever discovery reports the base
// gpt-5.3-codex model but no spark variant of its own.
it("adds openai-codex/gpt-5.3-codex-spark when base gpt-5.3-codex exists", async () => {
// Stub the pi-sdk import so loadModelCatalog sees a fixed registry:
// the base gpt-5.3-codex (with reasoning/contextWindow/input populated so we
// can check they are inherited) plus an unrelated gpt-5.2-codex that must
// NOT trigger the fallback.
__setModelCatalogImportForTest(
async () =>
({
AuthStorage: class {},
ModelRegistry: class {
getAll() {
return [
{
id: "gpt-5.3-codex",
provider: "openai-codex",
name: "GPT-5.3 Codex",
reasoning: true,
contextWindow: 200000,
input: ["text"],
},
{
id: "gpt-5.2-codex",
provider: "openai-codex",
name: "GPT-5.2 Codex",
},
];
}
},
// NOTE(review): double cast is deliberate here — the stub only implements
// the members loadModelCatalog touches, not the full PiSdkModule surface.
}) as unknown as PiSdkModule,
);
const result = await loadModelCatalog({ config: {} as OpenClawConfig });
// The synthesized spark entry must be present for the codex provider…
expect(result).toContainEqual(
expect.objectContaining({
provider: "openai-codex",
id: "gpt-5.3-codex-spark",
}),
);
// …and it should use its own id as display name while inheriting the base
// model's capability flags (reasoning copied from gpt-5.3-codex).
const spark = result.find((entry) => entry.id === "gpt-5.3-codex-spark");
expect(spark?.name).toBe("gpt-5.3-codex-spark");
expect(spark?.reasoning).toBe(true);
});
});

View File

@@ -27,6 +27,35 @@ let hasLoggedModelCatalogError = false;
// Lazy pi-sdk loader; kept behind a mutable binding so tests can swap in a
// stub module (see __setModelCatalogImportForTest usage in the test suite).
const defaultImportPiSdk = () => import("./pi-model-discovery.js");
let importPiSdk = defaultImportPiSdk;
const CODEX_PROVIDER = "openai-codex";
const OPENAI_CODEX_GPT53_MODEL_ID = "gpt-5.3-codex";
const OPENAI_CODEX_GPT53_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
/**
 * Ensures the catalog exposes `openai-codex/gpt-5.3-codex-spark`.
 *
 * Discovery may not report the spark variant even when the base
 * `gpt-5.3-codex` model is available. If the spark id is absent but the base
 * entry exists, a spark entry is appended that clones the base entry's
 * capabilities while using the spark id as both id and display name.
 * Mutates `models` in place; no-op when spark already exists or the base
 * model is missing. Id comparison is case-insensitive on the catalog side.
 */
function applyOpenAICodexSparkFallback(models: ModelCatalogEntry[]): void {
  const isCodexModel = (entry: ModelCatalogEntry, modelId: string): boolean =>
    entry.provider === CODEX_PROVIDER && entry.id.toLowerCase() === modelId;

  // Discovery already reported spark — nothing to synthesize.
  if (models.some((entry) => isCodexModel(entry, OPENAI_CODEX_GPT53_SPARK_MODEL_ID))) {
    return;
  }
  const baseModel = models.find((entry) => isCodexModel(entry, OPENAI_CODEX_GPT53_MODEL_ID));
  if (baseModel) {
    // Clone the base entry; spark keeps its own id/name but inherits the rest.
    models.push({
      ...baseModel,
      id: OPENAI_CODEX_GPT53_SPARK_MODEL_ID,
      name: OPENAI_CODEX_GPT53_SPARK_MODEL_ID,
    });
  }
}
export function resetModelCatalogCacheForTest() {
modelCatalogPromise = null;
hasLoggedModelCatalogError = false;
@@ -94,6 +123,7 @@ export async function loadModelCatalog(params?: {
const input = Array.isArray(entry?.input) ? entry.input : undefined;
models.push({ id, name, provider, contextWindow, reasoning, input });
}
applyOpenAICodexSparkFallback(models);
if (models.length === 0) {
// If we found nothing, don't cache this result so we can try again.

View File

@@ -172,6 +172,43 @@ describe("resolveModel", () => {
});
});
// Regression test: resolveModel must build a gpt-5.3-codex-spark model by
// cloning the gpt-5.2-codex template when the spark id itself is not in the
// discovered catalog.
it("builds an openai-codex fallback for gpt-5.3-codex-spark", () => {
// Template entry the fallback is expected to clone; api/baseUrl/limits below
// are asserted to carry over unchanged.
const templateModel = {
id: "gpt-5.2-codex",
name: "GPT-5.2 Codex",
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"] as const,
cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 },
contextWindow: 272000,
maxTokens: 128000,
};
// Mock discovery so ONLY the template resolves; the spark id deliberately
// returns null to force the fallback path.
vi.mocked(discoverModels).mockReturnValue({
find: vi.fn((provider: string, modelId: string) => {
if (provider === "openai-codex" && modelId === "gpt-5.2-codex") {
return templateModel;
}
return null;
}),
} as unknown as ReturnType<typeof discoverModels>);
const result = resolveModel("openai-codex", "gpt-5.3-codex-spark", "/tmp/agent");
expect(result.error).toBeUndefined();
// The fallback keeps the spark id but inherits the template's transport
// settings and limits.
expect(result.model).toMatchObject({
provider: "openai-codex",
id: "gpt-5.3-codex-spark",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
contextWindow: 272000,
maxTokens: 128000,
});
});
it("builds an anthropic forward-compat fallback for claude-opus-4-6", () => {
const templateModel = {
id: "claude-opus-4-5",
@@ -283,6 +320,12 @@ describe("resolveModel", () => {
expect(result.error).toBe("Unknown model: openai-codex/gpt-4.1-mini");
});
// Only the exact spark id is special-cased; any other gpt-5.3-codex-* suffix
// must still fail resolution with the standard unknown-model error.
it("errors for unknown gpt-5.3-codex-* variants", () => {
const resolution = resolveModel("openai-codex", "gpt-5.3-codex-unknown", "/tmp/agent");
expect(resolution.model).toBeUndefined();
expect(resolution.error).toBe("Unknown model: openai-codex/gpt-5.3-codex-unknown");
});
it("uses codex fallback even when openai-codex provider is configured", () => {
// This test verifies the ordering: codex fallback must fire BEFORE the generic providerCfg fallback.
// If ordering is wrong, the generic fallback would use api: "openai-responses" (the default)

View File

@@ -20,6 +20,7 @@ type InlineProviderConfig = {
};
// Codex model ids that get a synthesized fallback when absent from discovery.
const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex";
const OPENAI_CODEX_GPT_53_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
// Existing catalog entries used as templates when building the fallback model.
const OPENAI_CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const;
@@ -39,7 +40,11 @@ function resolveOpenAICodexGpt53FallbackModel(
if (normalizedProvider !== "openai-codex") {
return undefined;
}
if (trimmedModelId.toLowerCase() !== OPENAI_CODEX_GPT_53_MODEL_ID) {
const loweredModelId = trimmedModelId.toLowerCase();
if (
loweredModelId !== OPENAI_CODEX_GPT_53_MODEL_ID &&
loweredModelId !== OPENAI_CODEX_GPT_53_SPARK_MODEL_ID
) {
return undefined;
}

View File

@@ -154,7 +154,7 @@ describe("directive behavior", () => {
const texts = (Array.isArray(res) ? res : [res]).map((entry) => entry?.text).filter(Boolean);
expect(texts).toContain(
'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.3-codex, openai-codex/gpt-5.2-codex, openai-codex/gpt-5.1-codex, github-copilot/gpt-5.2-codex or github-copilot/gpt-5.2.',
'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.3-codex, openai-codex/gpt-5.3-codex-spark, openai-codex/gpt-5.2-codex, openai-codex/gpt-5.1-codex, github-copilot/gpt-5.2-codex or github-copilot/gpt-5.2.',
);
});
});

View File

@@ -44,6 +44,7 @@ describe("listThinkingLevels", () => {
// Every codex-family model, including the spark variant, must offer "xhigh".
it("includes xhigh for codex models", () => {
for (const modelId of ["gpt-5.2-codex", "gpt-5.3-codex", "gpt-5.3-codex-spark"]) {
expect(listThinkingLevels(undefined, modelId)).toContain("xhigh");
}
});
it("includes xhigh for openai gpt-5.2", () => {

View File

@@ -24,6 +24,7 @@ export function isBinaryThinkingProvider(provider?: string | null): boolean {
export const XHIGH_MODEL_REFS = [
"openai/gpt-5.2",
"openai-codex/gpt-5.3-codex",
"openai-codex/gpt-5.3-codex-spark",
"openai-codex/gpt-5.2-codex",
"openai-codex/gpt-5.1-codex",
"github-copilot/gpt-5.2-codex",