chore: apply local workspace updates (#9911)

* chore: apply local workspace updates

* fix: resolve prep findings after rebase (#9898) (thanks @gumadeiras)

* refactor: centralize model allowlist normalization (#9898) (thanks @gumadeiras)

* fix: guard model allowlist initialization (#9911)

* docs: update changelog scope for #9911

* docs: remove model names from changelog entry (#9911)

* fix: satisfy type-aware lint in model allowlist (#9911)
This commit is contained in:
Gustavo Madeira Santana
2026-02-05 16:54:44 -05:00
committed by GitHub
parent 93b450349f
commit 4629054403
72 changed files with 722 additions and 251 deletions

View File

@@ -2,5 +2,5 @@
// Model id uses pi-ai's built-in Anthropic catalog.
export const DEFAULT_PROVIDER = "anthropic";
export const DEFAULT_MODEL = "claude-opus-4-6";
// Context window: Opus supports ~200k tokens (per pi-ai models.generated.ts for Opus 4.5).
// Conservative fallback used when model metadata is unavailable.
export const DEFAULT_CONTEXT_TOKENS = 200_000;

View File

@@ -140,7 +140,7 @@ describe("getApiKeyForModel", () => {
} catch (err) {
error = err;
}
expect(String(error)).toContain("openai-codex/gpt-5.2");
expect(String(error)).toContain("openai-codex/gpt-5.3-codex");
} finally {
if (previousOpenAiKey === undefined) {
delete process.env.OPENAI_API_KEY;

View File

@@ -213,7 +213,7 @@ export async function resolveApiKeyForProvider(params: {
const hasCodex = listProfilesForProvider(store, "openai-codex").length > 0;
if (hasCodex) {
throw new Error(
'No API key found for provider "openai". You are authenticated with OpenAI Codex OAuth. Use openai-codex/gpt-5.2 (ChatGPT OAuth) or set OPENAI_API_KEY for openai/gpt-5.2.',
'No API key found for provider "openai". You are authenticated with OpenAI Codex OAuth. Use openai-codex/gpt-5.3-codex (OAuth) or set OPENAI_API_KEY to use openai/gpt-5.1-codex.',
);
}
}

View File

@@ -13,9 +13,9 @@ import {
isTimeoutError,
} from "./failover-error.js";
import {
buildConfiguredAllowlistKeys,
buildModelAliasIndex,
modelKey,
parseModelRef,
resolveConfiguredModelRef,
resolveModelRefFromString,
} from "./model-selection.js";
@@ -51,28 +51,6 @@ function shouldRethrowAbort(err: unknown): boolean {
return isAbortError(err) && !isTimeoutError(err);
}
/**
 * Builds the set of model keys allowed by the config's `agents.defaults.models`
 * map, normalizing each configured ref via `parseModelRef`.
 *
 * Returns `null` when no allowlist is configured, or when none of the
 * configured entries parse to a valid model ref (callers treat `null` as
 * "no restriction").
 */
function buildAllowedModelKeys(
  cfg: OpenClawConfig | undefined,
  defaultProvider: string,
): Set<string> | null {
  // Object.keys already returns string[], so no IIFE or coercion is needed.
  const rawAllowlist = Object.keys(cfg?.agents?.defaults?.models ?? {});
  if (rawAllowlist.length === 0) {
    return null;
  }
  const keys = new Set<string>();
  for (const raw of rawAllowlist) {
    const parsed = parseModelRef(raw, defaultProvider);
    if (!parsed) {
      // Skip unparseable entries rather than failing the whole allowlist.
      continue;
    }
    keys.add(modelKey(parsed.provider, parsed.model));
  }
  return keys.size > 0 ? keys : null;
}
function resolveImageFallbackCandidates(params: {
cfg: OpenClawConfig | undefined;
defaultProvider: string;
@@ -82,7 +60,10 @@ function resolveImageFallbackCandidates(params: {
cfg: params.cfg ?? {},
defaultProvider: params.defaultProvider,
});
const allowlist = buildAllowedModelKeys(params.cfg, params.defaultProvider);
const allowlist = buildConfiguredAllowlistKeys({
cfg: params.cfg,
defaultProvider: params.defaultProvider,
});
const seen = new Set<string>();
const candidates: ModelCandidate[] = [];
@@ -166,7 +147,10 @@ function resolveFallbackCandidates(params: {
cfg: params.cfg ?? {},
defaultProvider,
});
const allowlist = buildAllowedModelKeys(params.cfg, defaultProvider);
const allowlist = buildConfiguredAllowlistKeys({
cfg: params.cfg,
defaultProvider,
});
const seen = new Set<string>();
const candidates: ModelCandidate[] = [];

View File

@@ -29,6 +29,17 @@ describe("model-selection", () => {
});
});
it("normalizes anthropic alias refs to canonical model ids", () => {
expect(parseModelRef("anthropic/opus-4.6", "openai")).toEqual({
provider: "anthropic",
model: "claude-opus-4-6",
});
expect(parseModelRef("opus-4.6", "anthropic")).toEqual({
provider: "anthropic",
model: "claude-opus-4-6",
});
});
it("should use default provider if none specified", () => {
expect(parseModelRef("claude-3-5-sonnet", "anthropic")).toEqual({
provider: "anthropic",

View File

@@ -16,6 +16,12 @@ export type ModelAliasIndex = {
byKey: Map<string, string[]>;
};
// Short Anthropic marketing-style aliases (dotted versions) mapped to the
// canonical model ids. Keys must be lowercase: the lookup in
// normalizeAnthropicModelId happens after toLowerCase().
const ANTHROPIC_MODEL_ALIASES: Record<string, string> = {
  "opus-4.6": "claude-opus-4-6",
  "opus-4.5": "claude-opus-4-5",
  "sonnet-4.5": "claude-sonnet-4-5",
};
/** Canonicalizes an alias key for case- and whitespace-insensitive lookup. */
function normalizeAliasKey(value: string): string {
  // Lowercasing and trimming are independent, so the order is irrelevant.
  const lowered = value.toLowerCase();
  return lowered.trim();
}
@@ -59,19 +65,7 @@ function normalizeAnthropicModelId(model: string): string {
return trimmed;
}
const lower = trimmed.toLowerCase();
if (lower === "opus-4.6") {
return "claude-opus-4-6";
}
if (lower === "opus-4.5") {
return "claude-opus-4-5";
}
if (lower === "opus-4.6") {
return "claude-opus-4-6";
}
if (lower === "sonnet-4.5") {
return "claude-sonnet-4-5";
}
return trimmed;
return ANTHROPIC_MODEL_ALIASES[lower] ?? trimmed;
}
function normalizeProviderModelId(provider: string, model: string): string {
@@ -105,6 +99,33 @@ export function parseModelRef(raw: string, defaultProvider: string): ModelRef |
return { provider, model: normalizedModel };
}
/**
 * Resolves a raw allowlist entry to a canonical `provider/model` key, or
 * `null` when the entry cannot be parsed as a model ref.
 */
export function resolveAllowlistModelKey(raw: string, defaultProvider: string): string | null {
  const ref = parseModelRef(raw, defaultProvider);
  return ref ? modelKey(ref.provider, ref.model) : null;
}
/**
 * Builds the set of allowed model keys from `agents.defaults.models` in the
 * config. Returns `null` when no models are configured or when no configured
 * entry resolves to a valid key (callers treat `null` as "no restriction").
 */
export function buildConfiguredAllowlistKeys(params: {
  cfg: OpenClawConfig | undefined;
  defaultProvider: string;
}): Set<string> | null {
  const configuredModels = params.cfg?.agents?.defaults?.models ?? {};
  const rawAllowlist = Object.keys(configuredModels);
  if (rawAllowlist.length === 0) {
    return null;
  }
  const keys = new Set<string>();
  for (const raw of rawAllowlist) {
    // NOTE(review): the String(raw ?? "") coercion looks redundant for
    // Object.keys output, but appears intentional (type-aware lint) — kept.
    const key = resolveAllowlistModelKey(String(raw ?? ""), params.defaultProvider);
    if (key !== null) {
      keys.add(key);
    }
  }
  return keys.size > 0 ? keys : null;
}
export function buildModelAliasIndex(params: {
cfg: OpenClawConfig;
defaultProvider: string;

View File

@@ -8,12 +8,12 @@ import {
describe("resolveOpencodeZenAlias", () => {
it("resolves opus alias", () => {
expect(resolveOpencodeZenAlias("opus")).toBe("claude-opus-4-5");
expect(resolveOpencodeZenAlias("opus")).toBe("claude-opus-4-6");
});
it("keeps legacy aliases working", () => {
expect(resolveOpencodeZenAlias("sonnet")).toBe("claude-opus-4-5");
expect(resolveOpencodeZenAlias("haiku")).toBe("claude-opus-4-5");
expect(resolveOpencodeZenAlias("sonnet")).toBe("claude-opus-4-6");
expect(resolveOpencodeZenAlias("haiku")).toBe("claude-opus-4-6");
expect(resolveOpencodeZenAlias("gpt4")).toBe("gpt-5.1");
expect(resolveOpencodeZenAlias("o1")).toBe("gpt-5.2");
expect(resolveOpencodeZenAlias("gemini-2.5")).toBe("gemini-3-pro");
@@ -32,14 +32,14 @@ describe("resolveOpencodeZenAlias", () => {
});
it("is case-insensitive", () => {
expect(resolveOpencodeZenAlias("OPUS")).toBe("claude-opus-4-5");
expect(resolveOpencodeZenAlias("OPUS")).toBe("claude-opus-4-6");
expect(resolveOpencodeZenAlias("Gpt5")).toBe("gpt-5.2");
});
});
describe("resolveOpencodeZenModelApi", () => {
it("maps APIs by model family", () => {
expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe("anthropic-messages");
expect(resolveOpencodeZenModelApi("claude-opus-4-6")).toBe("anthropic-messages");
expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe("google-generative-ai");
expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses");
expect(resolveOpencodeZenModelApi("alpha-gd4")).toBe("openai-completions");
@@ -53,13 +53,14 @@ describe("getOpencodeZenStaticFallbackModels", () => {
it("returns an array of models", () => {
const models = getOpencodeZenStaticFallbackModels();
expect(Array.isArray(models)).toBe(true);
expect(models.length).toBe(9);
expect(models.length).toBe(10);
});
it("includes Claude, GPT, Gemini, and GLM models", () => {
const models = getOpencodeZenStaticFallbackModels();
const ids = models.map((m) => m.id);
expect(ids).toContain("claude-opus-4-6");
expect(ids).toContain("claude-opus-4-5");
expect(ids).toContain("gpt-5.2");
expect(ids).toContain("gpt-5.1-codex");
@@ -83,15 +84,16 @@ describe("getOpencodeZenStaticFallbackModels", () => {
describe("OPENCODE_ZEN_MODEL_ALIASES", () => {
it("has expected aliases", () => {
expect(OPENCODE_ZEN_MODEL_ALIASES.opus).toBe("claude-opus-4-5");
expect(OPENCODE_ZEN_MODEL_ALIASES.opus).toBe("claude-opus-4-6");
expect(OPENCODE_ZEN_MODEL_ALIASES.codex).toBe("gpt-5.1-codex");
expect(OPENCODE_ZEN_MODEL_ALIASES.gpt5).toBe("gpt-5.2");
expect(OPENCODE_ZEN_MODEL_ALIASES.gemini).toBe("gemini-3-pro");
expect(OPENCODE_ZEN_MODEL_ALIASES.glm).toBe("glm-4.7");
expect(OPENCODE_ZEN_MODEL_ALIASES["opus-4.5"]).toBe("claude-opus-4-5");
// Legacy aliases (kept for backward compatibility).
expect(OPENCODE_ZEN_MODEL_ALIASES.sonnet).toBe("claude-opus-4-5");
expect(OPENCODE_ZEN_MODEL_ALIASES.haiku).toBe("claude-opus-4-5");
expect(OPENCODE_ZEN_MODEL_ALIASES.sonnet).toBe("claude-opus-4-6");
expect(OPENCODE_ZEN_MODEL_ALIASES.haiku).toBe("claude-opus-4-6");
expect(OPENCODE_ZEN_MODEL_ALIASES.gpt4).toBe("gpt-5.1");
expect(OPENCODE_ZEN_MODEL_ALIASES.o1).toBe("gpt-5.2");
expect(OPENCODE_ZEN_MODEL_ALIASES["gemini-2.5"]).toBe("gemini-3-pro");

View File

@@ -11,7 +11,7 @@
import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";
export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5";
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-6";
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`;
// Cache for fetched models (1 hour TTL)
@@ -21,19 +21,20 @@ const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
/**
* Model aliases for convenient shortcuts.
* Users can use "opus" instead of "claude-opus-4-5", etc.
* Users can use "opus" instead of "claude-opus-4-6", etc.
*/
export const OPENCODE_ZEN_MODEL_ALIASES: Record<string, string> = {
// Claude
opus: "claude-opus-4-5",
opus: "claude-opus-4-6",
"opus-4.6": "claude-opus-4-6",
"opus-4.5": "claude-opus-4-5",
"opus-4": "claude-opus-4-5",
"opus-4": "claude-opus-4-6",
// Legacy Claude aliases (OpenCode Zen rotates model catalogs; keep old keys working).
sonnet: "claude-opus-4-5",
"sonnet-4": "claude-opus-4-5",
haiku: "claude-opus-4-5",
"haiku-3.5": "claude-opus-4-5",
sonnet: "claude-opus-4-6",
"sonnet-4": "claude-opus-4-6",
haiku: "claude-opus-4-6",
"haiku-3.5": "claude-opus-4-6",
// GPT-5.x family
gpt5: "gpt-5.2",
@@ -119,6 +120,7 @@ const MODEL_COSTS: Record<
cacheRead: 0.107,
cacheWrite: 0,
},
"claude-opus-4-6": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
"claude-opus-4-5": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
"gemini-3-pro": { input: 2, output: 12, cacheRead: 0.2, cacheWrite: 0 },
"gpt-5.1-codex-mini": {
@@ -143,6 +145,7 @@ const DEFAULT_COST = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 };
const MODEL_CONTEXT_WINDOWS: Record<string, number> = {
"gpt-5.1-codex": 400000,
"claude-opus-4-6": 1000000,
"claude-opus-4-5": 200000,
"gemini-3-pro": 1048576,
"gpt-5.1-codex-mini": 400000,
@@ -159,6 +162,7 @@ function getDefaultContextWindow(modelId: string): number {
const MODEL_MAX_TOKENS: Record<string, number> = {
"gpt-5.1-codex": 128000,
"claude-opus-4-6": 128000,
"claude-opus-4-5": 64000,
"gemini-3-pro": 65536,
"gpt-5.1-codex-mini": 128000,
@@ -195,6 +199,7 @@ function buildModelDefinition(modelId: string): ModelDefinitionConfig {
*/
const MODEL_NAMES: Record<string, string> = {
"gpt-5.1-codex": "GPT-5.1 Codex",
"claude-opus-4-6": "Claude Opus 4.6",
"claude-opus-4-5": "Claude Opus 4.5",
"gemini-3-pro": "Gemini 3 Pro",
"gpt-5.1-codex-mini": "GPT-5.1 Codex Mini",
@@ -222,6 +227,7 @@ function formatModelName(modelId: string): string {
export function getOpencodeZenStaticFallbackModels(): ModelDefinitionConfig[] {
const modelIds = [
"gpt-5.1-codex",
"claude-opus-4-6",
"claude-opus-4-5",
"gemini-3-pro",
"gpt-5.1-codex-mini",

View File

@@ -53,7 +53,7 @@ describe("image tool implicit imageModel config", () => {
};
expect(resolveImageModelConfigForTool({ cfg, agentDir })).toEqual({
primary: "minimax/MiniMax-VL-01",
fallbacks: ["openai/gpt-5-mini", "anthropic/claude-opus-4-6"],
fallbacks: ["openai/gpt-5-mini", "anthropic/claude-opus-4-5"],
});
expect(createImageTool({ config: cfg, agentDir })).not.toBeNull();
});

View File

@@ -24,6 +24,8 @@ import {
} from "./image-tool.helpers.js";
const DEFAULT_PROMPT = "Describe the image.";
const ANTHROPIC_IMAGE_PRIMARY = "anthropic/claude-opus-4-6";
const ANTHROPIC_IMAGE_FALLBACK = "anthropic/claude-opus-4-5";
export const __testing = {
decodeDataUrl,
@@ -117,7 +119,7 @@ export function resolveImageModelConfigForTool(params: {
} else if (primary.provider === "openai" && openaiOk) {
preferred = "openai/gpt-5-mini";
} else if (primary.provider === "anthropic" && anthropicOk) {
preferred = "anthropic/claude-opus-4-6";
preferred = ANTHROPIC_IMAGE_PRIMARY;
}
if (preferred?.trim()) {
@@ -125,7 +127,7 @@ export function resolveImageModelConfigForTool(params: {
addFallback("openai/gpt-5-mini");
}
if (anthropicOk) {
addFallback("anthropic/claude-opus-4-6");
addFallback(ANTHROPIC_IMAGE_FALLBACK);
}
// Don't duplicate primary in fallbacks.
const pruned = fallbacks.filter((ref) => ref !== preferred);
@@ -138,7 +140,7 @@ export function resolveImageModelConfigForTool(params: {
// Cross-provider fallback when we can't pair with the primary provider.
if (openaiOk) {
if (anthropicOk) {
addFallback("anthropic/claude-opus-4-6");
addFallback(ANTHROPIC_IMAGE_FALLBACK);
}
return {
primary: "openai/gpt-5-mini",
@@ -146,7 +148,10 @@ export function resolveImageModelConfigForTool(params: {
};
}
if (anthropicOk) {
return { primary: "anthropic/claude-opus-4-6" };
return {
primary: ANTHROPIC_IMAGE_PRIMARY,
fallbacks: [ANTHROPIC_IMAGE_FALLBACK],
};
}
return null;