chore: apply local workspace updates (#9911)

* chore: apply local workspace updates

* fix: resolve prep findings after rebase (#9898) (thanks @gumadeiras)

* refactor: centralize model allowlist normalization (#9898) (thanks @gumadeiras)

* fix: guard model allowlist initialization (#9911)

* docs: update changelog scope for #9911

* docs: remove model names from changelog entry (#9911)

* fix: satisfy type-aware lint in model allowlist (#9911)
This commit is contained in:
Gustavo Madeira Santana
2026-02-05 16:54:44 -05:00
committed by GitHub
parent 93b450349f
commit 4629054403
72 changed files with 722 additions and 251 deletions

View File

@@ -7,6 +7,7 @@ import {
normalizeApiKeyInput,
validateApiKeyInput,
} from "./auth-choice.api-key.js";
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
import { isRemoteEnvironment } from "./oauth-env.js";
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
import { applyAuthProfileConfig, writeOAuthCredentials } from "./onboard-auth.js";
@@ -15,6 +16,11 @@ import {
applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js";
import {
applyOpenAIConfig,
applyOpenAIProviderConfig,
OPENAI_DEFAULT_MODEL,
} from "./openai-model-default.js";
export async function applyAuthChoiceOpenAI(
params: ApplyAuthChoiceParams,
@@ -25,6 +31,18 @@ export async function applyAuthChoiceOpenAI(
}
if (authChoice === "openai-api-key") {
let nextConfig = params.config;
let agentModelOverride: string | undefined;
const noteAgentModel = async (model: string) => {
if (!params.agentId) {
return;
}
await params.prompter.note(
`Default model set to ${model} for agent "${params.agentId}".`,
"Model configured",
);
};
const envKey = resolveEnvApiKey("openai");
if (envKey) {
const useExisting = await params.prompter.confirm({
@@ -43,7 +61,19 @@ export async function applyAuthChoiceOpenAI(
`Copied OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
"OpenAI API key",
);
return { config: params.config };
const applied = await applyDefaultModelChoice({
config: nextConfig,
setDefaultModel: params.setDefaultModel,
defaultModel: OPENAI_DEFAULT_MODEL,
applyDefaultConfig: applyOpenAIConfig,
applyProviderConfig: applyOpenAIProviderConfig,
noteDefault: OPENAI_DEFAULT_MODEL,
noteAgentModel,
prompter: params.prompter,
});
nextConfig = applied.config;
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
return { config: nextConfig, agentModelOverride };
}
}
@@ -67,7 +97,19 @@ export async function applyAuthChoiceOpenAI(
`Saved OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
"OpenAI API key",
);
return { config: params.config };
const applied = await applyDefaultModelChoice({
config: nextConfig,
setDefaultModel: params.setDefaultModel,
defaultModel: OPENAI_DEFAULT_MODEL,
applyDefaultConfig: applyOpenAIConfig,
applyProviderConfig: applyOpenAIProviderConfig,
noteDefault: OPENAI_DEFAULT_MODEL,
noteAgentModel,
prompter: params.prompter,
});
nextConfig = applied.config;
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
return { config: nextConfig, agentModelOverride };
}
if (params.authChoice === "openai-codex") {

View File

@@ -0,0 +1,77 @@
import { describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
/**
 * Builds a no-op WizardPrompter stub for tests: every prompt resolves to a
 * neutral default (empty string/array, `false` confirm) and progress handles
 * are inert.
 */
function makePrompter(): WizardPrompter {
  const silent = async () => {};
  const stub: WizardPrompter = {
    intro: silent,
    outro: silent,
    note: silent,
    select: async () => "",
    multiselect: async () => [],
    text: async () => "",
    confirm: async () => false,
    progress: () => {
      return { update: () => {}, stop: () => {} };
    },
  };
  return stub;
}
describe("applyDefaultModelChoice", () => {
  it("ensures allowlist entry exists when returning an agent override", async () => {
    const model = "vercel-ai-gateway/anthropic/claude-opus-4.6";
    const noteSpy = vi.fn(async () => {});
    const result = await applyDefaultModelChoice({
      config: {},
      setDefaultModel: false,
      defaultModel: model,
      // Simulate a provider function that does not explicitly add the entry.
      applyProviderConfig: (cfg: OpenClawConfig) => cfg,
      applyDefaultConfig: (cfg: OpenClawConfig) => cfg,
      noteAgentModel: noteSpy,
      prompter: makePrompter(),
    });
    expect(noteSpy).toHaveBeenCalledWith(model);
    expect(result.agentModelOverride).toBe(model);
    expect(result.config.agents?.defaults?.models?.[model]).toEqual({});
  });
  it("adds canonical allowlist key for anthropic aliases", async () => {
    const aliasRef = "anthropic/opus-4.6";
    const result = await applyDefaultModelChoice({
      config: {},
      setDefaultModel: false,
      defaultModel: aliasRef,
      applyProviderConfig: (cfg: OpenClawConfig) => cfg,
      applyDefaultConfig: (cfg: OpenClawConfig) => cfg,
      noteAgentModel: async () => {},
      prompter: makePrompter(),
    });
    const allowlist = result.config.agents?.defaults?.models;
    // Both the raw alias and its resolved canonical key must be present.
    expect(allowlist?.[aliasRef]).toEqual({});
    expect(allowlist?.["anthropic/claude-opus-4-6"]).toEqual({});
  });
  it("uses applyDefaultConfig path when setDefaultModel is true", async () => {
    const primaryRef = "openai/gpt-5.1-codex";
    const result = await applyDefaultModelChoice({
      config: {},
      setDefaultModel: true,
      defaultModel: primaryRef,
      applyProviderConfig: (cfg: OpenClawConfig) => cfg,
      applyDefaultConfig: () => ({
        agents: {
          defaults: {
            model: { primary: primaryRef },
          },
        },
      }),
      noteDefault: primaryRef,
      noteAgentModel: async () => {},
      prompter: makePrompter(),
    });
    // The default-config path sets the primary model and yields no override.
    expect(result.agentModelOverride).toBeUndefined();
    expect(result.config.agents?.defaults?.model).toEqual({ primary: primaryRef });
  });
});

View File

@@ -1,5 +1,6 @@
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { ensureModelAllowlistEntry } from "./model-allowlist.js";
export async function applyDefaultModelChoice(params: {
config: OpenClawConfig;
@@ -20,6 +21,10 @@ export async function applyDefaultModelChoice(params: {
}
const next = params.applyProviderConfig(params.config);
const nextWithModel = ensureModelAllowlistEntry({
cfg: next,
modelRef: params.defaultModel,
});
await params.noteAgentModel(params.defaultModel);
return { config: next, agentModelOverride: params.defaultModel };
return { config: nextWithModel, agentModelOverride: params.defaultModel };
}

View File

@@ -284,7 +284,7 @@ describe("applyAuthChoice", () => {
);
expect(result.config.agents?.defaults?.model?.primary).toBe("anthropic/claude-opus-4-5");
expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
expect(result.agentModelOverride).toBe("opencode/claude-opus-4-5");
expect(result.agentModelOverride).toBe("opencode/claude-opus-4-6");
});
it("uses existing OPENROUTER_API_KEY when selecting openrouter-api-key", async () => {
@@ -398,7 +398,7 @@ describe("applyAuthChoice", () => {
mode: "api_key",
});
expect(result.config.agents?.defaults?.model?.primary).toBe(
"vercel-ai-gateway/anthropic/claude-opus-4.5",
"vercel-ai-gateway/anthropic/claude-opus-4.6",
);
const authProfilePath = authProfilePathFor(requireAgentDir());

View File

@@ -0,0 +1,41 @@
import type { OpenClawConfig } from "../config/config.js";
import { DEFAULT_PROVIDER } from "../agents/defaults.js";
import { resolveAllowlistModelKey } from "../agents/model-selection.js";
/**
 * Returns a config whose `agents.defaults.models` allowlist contains an entry
 * for the given model ref — and for its canonical allowlist key, when one
 * resolves. Existing entry settings are preserved; the input config is never
 * mutated (a blank ref is a no-op and returns the config unchanged).
 */
export function ensureModelAllowlistEntry(params: {
  cfg: OpenClawConfig;
  modelRef: string;
  defaultProvider?: string;
}): OpenClawConfig {
  const ref = params.modelRef.trim();
  if (ref === "") {
    return params.cfg;
  }
  const provider = params.defaultProvider ?? DEFAULT_PROVIDER;
  // The canonical key may differ from the raw ref (e.g. alias resolution);
  // a Set dedupes the two when they coincide.
  const canonical = resolveAllowlistModelKey(ref, provider);
  const keys = canonical ? new Set<string>([ref, canonical]) : new Set<string>([ref]);
  const allowlist = { ...params.cfg.agents?.defaults?.models };
  for (const key of keys) {
    allowlist[key] = { ...allowlist[key] };
  }
  return {
    ...params.cfg,
    agents: {
      ...params.cfg.agents,
      defaults: {
        ...params.cfg.agents?.defaults,
        models: allowlist,
      },
    },
  };
}

View File

@@ -12,6 +12,7 @@ import {
resolveConfiguredModelRef,
} from "../agents/model-selection.js";
import { formatTokenK } from "./models/shared.js";
import { OPENAI_CODEX_DEFAULT_MODEL } from "./openai-codex-model-default.js";
const KEEP_VALUE = "__keep__";
const MANUAL_VALUE = "__manual__";
@@ -331,7 +332,7 @@ export async function promptModelAllowlist(params: {
params.message ??
"Allowlist models (comma-separated provider/model; blank to keep current)",
initialValue: existingKeys.join(", "),
placeholder: "openai-codex/gpt-5.2, anthropic/claude-opus-4-6",
placeholder: `${OPENAI_CODEX_DEFAULT_MODEL}, anthropic/claude-opus-4-6`,
});
const parsed = String(raw ?? "")
.split(",")

View File

@@ -117,7 +117,7 @@ export async function setVeniceApiKey(key: string, agentDir?: string) {
export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
export const XIAOMI_DEFAULT_MODEL_REF = "xiaomi/mimo-v2-flash";
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.5";
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.6";
export async function setZaiApiKey(key: string, agentDir?: string) {
// Write to resolved agent dir so gateway finds credentials on startup.

View File

@@ -393,7 +393,7 @@ describe("applyOpencodeZenProviderConfig", () => {
it("adds allowlist entry for the default model", () => {
const cfg = applyOpencodeZenProviderConfig({});
const models = cfg.agents?.defaults?.models ?? {};
expect(Object.keys(models)).toContain("opencode/claude-opus-4-5");
expect(Object.keys(models)).toContain("opencode/claude-opus-4-6");
});
it("preserves existing alias for the default model", () => {
@@ -401,19 +401,19 @@ describe("applyOpencodeZenProviderConfig", () => {
agents: {
defaults: {
models: {
"opencode/claude-opus-4-5": { alias: "My Opus" },
"opencode/claude-opus-4-6": { alias: "My Opus" },
},
},
},
});
expect(cfg.agents?.defaults?.models?.["opencode/claude-opus-4-5"]?.alias).toBe("My Opus");
expect(cfg.agents?.defaults?.models?.["opencode/claude-opus-4-6"]?.alias).toBe("My Opus");
});
});
describe("applyOpencodeZenConfig", () => {
it("sets correct primary model", () => {
const cfg = applyOpencodeZenConfig({});
expect(cfg.agents?.defaults?.model?.primary).toBe("opencode/claude-opus-4-5");
expect(cfg.agents?.defaults?.model?.primary).toBe("opencode/claude-opus-4-6");
});
it("preserves existing model fallbacks", () => {

View File

@@ -66,7 +66,7 @@ describe("onboard (non-interactive): Vercel AI Gateway", () => {
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.provider).toBe("vercel-ai-gateway");
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.mode).toBe("api_key");
expect(cfg.agents?.defaults?.model?.primary).toBe(
"vercel-ai-gateway/anthropic/claude-opus-4.5",
"vercel-ai-gateway/anthropic/claude-opus-4.6",
);
const { ensureAuthProfileStore } = await import("../agents/auth-profiles.js");

View File

@@ -0,0 +1,77 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it, vi } from "vitest";
import { OPENAI_DEFAULT_MODEL } from "./openai-model-default.js";
describe("onboard (non-interactive): OpenAI API key", () => {
  it("stores OPENAI_API_KEY and configures the OpenAI default model", async () => {
    // Snapshot every env var this flow reads or writes so the exact prior
    // state can be restored after the test.
    const prev = {
      home: process.env.HOME,
      stateDir: process.env.OPENCLAW_STATE_DIR,
      configPath: process.env.OPENCLAW_CONFIG_PATH,
      skipChannels: process.env.OPENCLAW_SKIP_CHANNELS,
      skipGmail: process.env.OPENCLAW_SKIP_GMAIL_WATCHER,
      skipCron: process.env.OPENCLAW_SKIP_CRON,
      skipCanvas: process.env.OPENCLAW_SKIP_CANVAS_HOST,
      token: process.env.OPENCLAW_GATEWAY_TOKEN,
      password: process.env.OPENCLAW_GATEWAY_PASSWORD,
    };
    // Assigning `undefined` to a process.env key stores the literal string
    // "undefined" in Node — vars that were unset must be deleted, not
    // reassigned, or the restore step corrupts the environment.
    const restoreEnv = (key: string, value: string | undefined) => {
      if (value === undefined) {
        delete process.env[key];
      } else {
        process.env[key] = value;
      }
    };
    // Disable background subsystems so onboarding stays hermetic.
    process.env.OPENCLAW_SKIP_CHANNELS = "1";
    process.env.OPENCLAW_SKIP_GMAIL_WATCHER = "1";
    process.env.OPENCLAW_SKIP_CRON = "1";
    process.env.OPENCLAW_SKIP_CANVAS_HOST = "1";
    delete process.env.OPENCLAW_GATEWAY_TOKEN;
    delete process.env.OPENCLAW_GATEWAY_PASSWORD;
    // Point HOME, state dir, and config path at a throwaway temp dir.
    const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-onboard-openai-"));
    process.env.HOME = tempHome;
    process.env.OPENCLAW_STATE_DIR = tempHome;
    process.env.OPENCLAW_CONFIG_PATH = path.join(tempHome, "openclaw.json");
    // Fresh module state so config paths are re-resolved from the env above.
    vi.resetModules();
    const runtime = {
      log: () => {},
      error: (msg: string) => {
        throw new Error(msg);
      },
      exit: (code: number) => {
        throw new Error(`exit:${code}`);
      },
    };
    try {
      const { runNonInteractiveOnboarding } = await import("./onboard-non-interactive.js");
      await runNonInteractiveOnboarding(
        {
          nonInteractive: true,
          authChoice: "openai-api-key",
          openaiApiKey: "sk-openai-test",
          skipHealth: true,
          skipChannels: true,
          skipSkills: true,
          json: true,
        },
        runtime,
      );
      // Verify the persisted config file has the OpenAI default as primary.
      const { CONFIG_PATH } = await import("../config/config.js");
      const cfg = JSON.parse(await fs.readFile(CONFIG_PATH, "utf8")) as {
        agents?: { defaults?: { model?: { primary?: string } } };
      };
      expect(cfg.agents?.defaults?.model?.primary).toBe(OPENAI_DEFAULT_MODEL);
    } finally {
      await fs.rm(tempHome, { recursive: true, force: true });
      restoreEnv("HOME", prev.home);
      restoreEnv("OPENCLAW_STATE_DIR", prev.stateDir);
      restoreEnv("OPENCLAW_CONFIG_PATH", prev.configPath);
      restoreEnv("OPENCLAW_SKIP_CHANNELS", prev.skipChannels);
      restoreEnv("OPENCLAW_SKIP_GMAIL_WATCHER", prev.skipGmail);
      restoreEnv("OPENCLAW_SKIP_CRON", prev.skipCron);
      restoreEnv("OPENCLAW_SKIP_CANVAS_HOST", prev.skipCanvas);
      restoreEnv("OPENCLAW_GATEWAY_TOKEN", prev.token);
      restoreEnv("OPENCLAW_GATEWAY_PASSWORD", prev.password);
    }
  }, 60_000);
});

View File

@@ -37,6 +37,7 @@ import {
setXiaomiApiKey,
setZaiApiKey,
} from "../../onboard-auth.js";
import { applyOpenAIConfig } from "../../openai-model-default.js";
import { resolveNonInteractiveApiKey } from "../api-keys.js";
export async function applyNonInteractiveAuthChoice(params: {
@@ -234,7 +235,7 @@ export async function applyNonInteractiveAuthChoice(params: {
const result = upsertSharedEnvVar({ key: "OPENAI_API_KEY", value: key });
process.env.OPENAI_API_KEY = key;
runtime.log(`Saved OPENAI_API_KEY to ${shortenHomePath(result.path)}`);
return nextConfig;
return applyOpenAIConfig(nextConfig);
}
if (authChoice === "openrouter-api-key") {

View File

@@ -4,6 +4,7 @@ import {
applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js";
import { OPENAI_DEFAULT_MODEL } from "./openai-model-default.js";
describe("applyOpenAICodexModelDefault", () => {
it("sets openai-codex default when model is unset", () => {
@@ -17,7 +18,7 @@ describe("applyOpenAICodexModelDefault", () => {
it("sets openai-codex default when model is openai/*", () => {
const cfg: OpenClawConfig = {
agents: { defaults: { model: "openai/gpt-5.2" } },
agents: { defaults: { model: OPENAI_DEFAULT_MODEL } },
};
const applied = applyOpenAICodexModelDefault(cfg);
expect(applied.changed).toBe(true);
@@ -28,7 +29,7 @@ describe("applyOpenAICodexModelDefault", () => {
it("does not override openai-codex/*", () => {
const cfg: OpenClawConfig = {
agents: { defaults: { model: "openai-codex/gpt-5.2" } },
agents: { defaults: { model: OPENAI_CODEX_DEFAULT_MODEL } },
};
const applied = applyOpenAICodexModelDefault(cfg);
expect(applied.changed).toBe(false);

View File

@@ -1,7 +1,7 @@
import type { OpenClawConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js";
export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.2";
export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.3-codex";
function shouldSetOpenAICodexModel(model?: string): boolean {
const trimmed = model?.trim();

View File

@@ -0,0 +1,40 @@
import { describe, expect, it } from "vitest";
import {
applyOpenAIConfig,
applyOpenAIProviderConfig,
OPENAI_DEFAULT_MODEL,
} from "./openai-model-default.js";
describe("applyOpenAIProviderConfig", () => {
  it("adds allowlist entry for default model", () => {
    const result = applyOpenAIProviderConfig({});
    const allowlistKeys = Object.keys(result.agents?.defaults?.models ?? {});
    expect(allowlistKeys).toContain(OPENAI_DEFAULT_MODEL);
  });
  it("preserves existing alias for default model", () => {
    const seeded = {
      agents: {
        defaults: {
          models: {
            [OPENAI_DEFAULT_MODEL]: { alias: "My GPT" },
          },
        },
      },
    };
    const result = applyOpenAIProviderConfig(seeded);
    // A user-configured alias must survive the provider defaults.
    expect(result.agents?.defaults?.models?.[OPENAI_DEFAULT_MODEL]?.alias).toBe("My GPT");
  });
});
describe("applyOpenAIConfig", () => {
  it("sets default when model is unset", () => {
    const result = applyOpenAIConfig({});
    expect(result.agents?.defaults?.model).toEqual({ primary: OPENAI_DEFAULT_MODEL });
  });
  it("overrides model.primary when model object already exists", () => {
    const seeded = {
      agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6", fallback: [] } } },
    };
    const result = applyOpenAIConfig(seeded);
    // primary is replaced; sibling settings such as fallback are kept.
    expect(result.agents?.defaults?.model).toEqual({ primary: OPENAI_DEFAULT_MODEL, fallback: [] });
  });
});

View File

@@ -0,0 +1,47 @@
import type { OpenClawConfig } from "../config/config.js";
import { ensureModelAllowlistEntry } from "./model-allowlist.js";
export const OPENAI_DEFAULT_MODEL = "openai/gpt-5.1-codex";
/**
 * Ensures the OpenAI default model has an allowlist entry and carries an
 * alias ("GPT" unless one is already configured). Pure: returns a new
 * config object without mutating the input.
 */
export function applyOpenAIProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  const withEntry = ensureModelAllowlistEntry({
    cfg,
    modelRef: OPENAI_DEFAULT_MODEL,
  });
  const existing = withEntry.agents?.defaults?.models;
  const entry = existing?.[OPENAI_DEFAULT_MODEL];
  const models = {
    ...existing,
    // Keep any caller-provided alias; only default to "GPT" when unset.
    [OPENAI_DEFAULT_MODEL]: { ...entry, alias: entry?.alias ?? "GPT" },
  };
  return {
    ...withEntry,
    agents: {
      ...withEntry.agents,
      defaults: {
        ...withEntry.agents?.defaults,
        models,
      },
    },
  };
}
/**
 * Applies the OpenAI provider defaults and then forces the primary default
 * model to OPENAI_DEFAULT_MODEL, preserving any other model-list settings
 * (e.g. fallbacks) already present on the config.
 */
export function applyOpenAIConfig(cfg: OpenClawConfig): OpenClawConfig {
  const next = applyOpenAIProviderConfig(cfg);
  const currentModel = next.agents?.defaults?.model;
  // `model` may be a plain string ref or an object; only an object's
  // sibling fields can be carried over when overriding `primary`.
  const model =
    currentModel && typeof currentModel === "object"
      ? { ...currentModel, primary: OPENAI_DEFAULT_MODEL }
      : { primary: OPENAI_DEFAULT_MODEL };
  return {
    ...next,
    agents: {
      ...next.agents,
      defaults: {
        ...next.agents?.defaults,
        model,
      },
    },
  };
}

View File

@@ -1,8 +1,11 @@
import type { OpenClawConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-5";
const LEGACY_OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-6";
const LEGACY_OPENCODE_ZEN_DEFAULT_MODELS = new Set([
"opencode/claude-opus-4-5",
"opencode-zen/claude-opus-4-5",
]);
function resolvePrimaryModel(model?: AgentModelListConfig | string): string | undefined {
if (typeof model === "string") {
@@ -20,7 +23,9 @@ export function applyOpencodeZenModelDefault(cfg: OpenClawConfig): {
} {
const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
const normalizedCurrent =
current === LEGACY_OPENCODE_ZEN_DEFAULT_MODEL ? OPENCODE_ZEN_DEFAULT_MODEL : current;
current && LEGACY_OPENCODE_ZEN_DEFAULT_MODELS.has(current)
? OPENCODE_ZEN_DEFAULT_MODEL
: current;
if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) {
return { next: cfg, changed: false };
}