Merge branch 'main' into qianfan

This commit is contained in:
ide-rea
2026-02-06 17:58:28 +08:00
committed by GitHub
413 changed files with 26165 additions and 6070 deletions

View File

@@ -42,5 +42,14 @@ export function resolveAgentRunContext(opts: AgentCommandOpts): AgentRunContext
merged.currentThreadTs = String(opts.threadId);
}
// Populate currentChannelId from the outbound target so that
// resolveTelegramAutoThreadId can match the originating chat.
if (!merged.currentChannelId && opts.to) {
const trimmedTo = opts.to.trim();
if (trimmedTo) {
merged.currentChannelId = trimmedTo;
}
}
return merged;
}

View File

@@ -114,4 +114,14 @@ describe("buildAuthChoiceOptions", () => {
expect(options.some((opt) => opt.value === "qwen-portal")).toBe(true);
});
it("includes xAI auth choice", () => {
const store: AuthProfileStore = { version: 1, profiles: {} };
const options = buildAuthChoiceOptions({
store,
includeSkip: false,
});
expect(options.some((opt) => opt.value === "xai-api-key")).toBe(true);
});
});

View File

@@ -24,6 +24,7 @@ export type AuthChoiceGroupId =
| "venice"
| "qwen"
| "qianfan";
| "xai";
export type AuthChoiceGroup = {
value: AuthChoiceGroupId;
@@ -38,6 +39,12 @@ const AUTH_CHOICE_GROUP_DEFS: {
hint?: string;
choices: AuthChoice[];
}[] = [
{
value: "xai",
label: "xAI (Grok)",
hint: "API key",
choices: ["xai-api-key"],
},
{
value: "openai",
label: "OpenAI",
@@ -156,6 +163,7 @@ export function buildAuthChoiceOptions(params: {
options.push({ value: "chutes", label: "Chutes (OAuth)" });
options.push({ value: "openai-api-key", label: "OpenAI API key" });
options.push({ value: "openrouter-api-key", label: "OpenRouter API key" });
options.push({ value: "xai-api-key", label: "xAI (Grok) API key" });
options.push({
value: "ai-gateway-api-key",
label: "Vercel AI Gateway API key",

View File

@@ -758,7 +758,7 @@ export async function applyAuthChoiceApiProviders(
[
"OpenCode Zen provides access to Claude, GPT, Gemini, and more models.",
"Get your API key at: https://opencode.ai/auth",
"Requires an active OpenCode Zen subscription.",
"OpenCode Zen bills per request. Check your OpenCode dashboard for details.",
].join("\n"),
"OpenCode Zen",
);

View File

@@ -7,6 +7,7 @@ import {
normalizeApiKeyInput,
validateApiKeyInput,
} from "./auth-choice.api-key.js";
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
import { isRemoteEnvironment } from "./oauth-env.js";
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
import { applyAuthProfileConfig, writeOAuthCredentials } from "./onboard-auth.js";
@@ -15,6 +16,11 @@ import {
applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js";
import {
applyOpenAIConfig,
applyOpenAIProviderConfig,
OPENAI_DEFAULT_MODEL,
} from "./openai-model-default.js";
export async function applyAuthChoiceOpenAI(
params: ApplyAuthChoiceParams,
@@ -25,6 +31,18 @@ export async function applyAuthChoiceOpenAI(
}
if (authChoice === "openai-api-key") {
let nextConfig = params.config;
let agentModelOverride: string | undefined;
const noteAgentModel = async (model: string) => {
if (!params.agentId) {
return;
}
await params.prompter.note(
`Default model set to ${model} for agent "${params.agentId}".`,
"Model configured",
);
};
const envKey = resolveEnvApiKey("openai");
if (envKey) {
const useExisting = await params.prompter.confirm({
@@ -43,7 +61,19 @@ export async function applyAuthChoiceOpenAI(
`Copied OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
"OpenAI API key",
);
return { config: params.config };
const applied = await applyDefaultModelChoice({
config: nextConfig,
setDefaultModel: params.setDefaultModel,
defaultModel: OPENAI_DEFAULT_MODEL,
applyDefaultConfig: applyOpenAIConfig,
applyProviderConfig: applyOpenAIProviderConfig,
noteDefault: OPENAI_DEFAULT_MODEL,
noteAgentModel,
prompter: params.prompter,
});
nextConfig = applied.config;
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
return { config: nextConfig, agentModelOverride };
}
}
@@ -67,7 +97,19 @@ export async function applyAuthChoiceOpenAI(
`Saved OPENAI_API_KEY to ${result.path} for launchd compatibility.`,
"OpenAI API key",
);
return { config: params.config };
const applied = await applyDefaultModelChoice({
config: nextConfig,
setDefaultModel: params.setDefaultModel,
defaultModel: OPENAI_DEFAULT_MODEL,
applyDefaultConfig: applyOpenAIConfig,
applyProviderConfig: applyOpenAIProviderConfig,
noteDefault: OPENAI_DEFAULT_MODEL,
noteAgentModel,
prompter: params.prompter,
});
nextConfig = applied.config;
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
return { config: nextConfig, agentModelOverride };
}
if (params.authChoice === "openai-codex") {

View File

@@ -12,6 +12,7 @@ import { applyAuthChoiceMiniMax } from "./auth-choice.apply.minimax.js";
import { applyAuthChoiceOAuth } from "./auth-choice.apply.oauth.js";
import { applyAuthChoiceOpenAI } from "./auth-choice.apply.openai.js";
import { applyAuthChoiceQwenPortal } from "./auth-choice.apply.qwen-portal.js";
import { applyAuthChoiceXAI } from "./auth-choice.apply.xai.js";
export type ApplyAuthChoiceParams = {
authChoice: AuthChoice;
@@ -27,6 +28,7 @@ export type ApplyAuthChoiceParams = {
cloudflareAiGatewayAccountId?: string;
cloudflareAiGatewayGatewayId?: string;
cloudflareAiGatewayApiKey?: string;
xaiApiKey?: string;
};
};
@@ -49,6 +51,7 @@ export async function applyAuthChoice(
applyAuthChoiceGoogleGeminiCli,
applyAuthChoiceCopilotProxy,
applyAuthChoiceQwenPortal,
applyAuthChoiceXAI,
];
for (const handler of handlers) {

View File

@@ -0,0 +1,86 @@
import type { ApplyAuthChoiceParams, ApplyAuthChoiceResult } from "./auth-choice.apply.js";
import { resolveEnvApiKey } from "../agents/model-auth.js";
import {
formatApiKeyPreview,
normalizeApiKeyInput,
validateApiKeyInput,
} from "./auth-choice.api-key.js";
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
import {
applyAuthProfileConfig,
applyXaiConfig,
applyXaiProviderConfig,
setXaiApiKey,
XAI_DEFAULT_MODEL_REF,
} from "./onboard-auth.js";
/**
 * Handles the "xai-api-key" auth choice: stores an xAI API key, records the
 * xai:default auth profile, and applies the default-model selection.
 * Returns null for every other auth choice so the dispatcher can try the
 * next handler.
 */
export async function applyAuthChoiceXAI(
  params: ApplyAuthChoiceParams,
): Promise<ApplyAuthChoiceResult | null> {
  if (params.authChoice !== "xai-api-key") {
    return null;
  }

  // Announce the per-agent model override only when an agent id is present.
  const noteAgentModel = async (model: string) => {
    if (!params.agentId) {
      return;
    }
    await params.prompter.note(
      `Default model set to ${model} for agent "${params.agentId}".`,
      "Model configured",
    );
  };

  // Credential resolution order: explicit opts key, then an existing
  // environment key (behind a confirmation), then an interactive prompt.
  let credentialStored = false;
  const providedKey = params.opts?.xaiApiKey?.trim();
  if (providedKey) {
    setXaiApiKey(normalizeApiKeyInput(providedKey), params.agentDir);
    credentialStored = true;
  }
  if (!credentialStored) {
    const envKey = resolveEnvApiKey("xai");
    if (envKey) {
      const useExisting = await params.prompter.confirm({
        message: `Use existing XAI_API_KEY (${envKey.source}, ${formatApiKeyPreview(envKey.apiKey)})?`,
        initialValue: true,
      });
      if (useExisting) {
        setXaiApiKey(envKey.apiKey, params.agentDir);
        credentialStored = true;
      }
    }
  }
  if (!credentialStored) {
    const key = await params.prompter.text({
      message: "Enter xAI API key",
      validate: validateApiKeyInput,
    });
    setXaiApiKey(normalizeApiKeyInput(String(key)), params.agentDir);
  }

  // Record the auth profile, then resolve the default-model choice
  // (global default vs. per-agent override) for the xAI provider.
  const configWithProfile = applyAuthProfileConfig(params.config, {
    profileId: "xai:default",
    provider: "xai",
    mode: "api_key",
  });
  const applied = await applyDefaultModelChoice({
    config: configWithProfile,
    setDefaultModel: params.setDefaultModel,
    defaultModel: XAI_DEFAULT_MODEL_REF,
    applyDefaultConfig: applyXaiConfig,
    applyProviderConfig: applyXaiProviderConfig,
    noteDefault: XAI_DEFAULT_MODEL_REF,
    noteAgentModel,
    prompter: params.prompter,
  });
  return { config: applied.config, agentModelOverride: applied.agentModelOverride };
}

View File

@@ -0,0 +1,77 @@
import { describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
// Inert prompter for tests: every interaction resolves immediately with a
// neutral value (empty string/array, false, no-op progress bar).
function makePrompter(): WizardPrompter {
  const silent = async () => {};
  return {
    intro: silent,
    outro: silent,
    note: silent,
    select: async () => "",
    multiselect: async () => [],
    text: async () => "",
    confirm: async () => false,
    progress: () => ({
      update: () => {},
      stop: () => {},
    }),
  };
}
describe("applyDefaultModelChoice", () => {
it("ensures allowlist entry exists when returning an agent override", async () => {
const defaultModel = "vercel-ai-gateway/anthropic/claude-opus-4.6";
const noteAgentModel = vi.fn(async () => {});
const applied = await applyDefaultModelChoice({
config: {},
setDefaultModel: false,
defaultModel,
// Simulate a provider function that does not explicitly add the entry.
applyProviderConfig: (config: OpenClawConfig) => config,
applyDefaultConfig: (config: OpenClawConfig) => config,
noteAgentModel,
prompter: makePrompter(),
});
expect(noteAgentModel).toHaveBeenCalledWith(defaultModel);
expect(applied.agentModelOverride).toBe(defaultModel);
expect(applied.config.agents?.defaults?.models?.[defaultModel]).toEqual({});
});
it("adds canonical allowlist key for anthropic aliases", async () => {
const defaultModel = "anthropic/opus-4.6";
const applied = await applyDefaultModelChoice({
config: {},
setDefaultModel: false,
defaultModel,
applyProviderConfig: (config: OpenClawConfig) => config,
applyDefaultConfig: (config: OpenClawConfig) => config,
noteAgentModel: async () => {},
prompter: makePrompter(),
});
expect(applied.config.agents?.defaults?.models?.[defaultModel]).toEqual({});
expect(applied.config.agents?.defaults?.models?.["anthropic/claude-opus-4-6"]).toEqual({});
});
it("uses applyDefaultConfig path when setDefaultModel is true", async () => {
const defaultModel = "openai/gpt-5.1-codex";
const applied = await applyDefaultModelChoice({
config: {},
setDefaultModel: true,
defaultModel,
applyProviderConfig: (config: OpenClawConfig) => config,
applyDefaultConfig: () => ({
agents: {
defaults: {
model: { primary: defaultModel },
},
},
}),
noteDefault: defaultModel,
noteAgentModel: async () => {},
prompter: makePrompter(),
});
expect(applied.agentModelOverride).toBeUndefined();
expect(applied.config.agents?.defaults?.model).toEqual({ primary: defaultModel });
});
});

View File

@@ -1,5 +1,6 @@
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { ensureModelAllowlistEntry } from "./model-allowlist.js";
export async function applyDefaultModelChoice(params: {
config: OpenClawConfig;
@@ -20,6 +21,10 @@ export async function applyDefaultModelChoice(params: {
}
const next = params.applyProviderConfig(params.config);
const nextWithModel = ensureModelAllowlistEntry({
cfg: next,
modelRef: params.defaultModel,
});
await params.noteAgentModel(params.defaultModel);
return { config: next, agentModelOverride: params.defaultModel };
return { config: nextWithModel, agentModelOverride: params.defaultModel };
}

View File

@@ -30,6 +30,7 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
"minimax-api-lightning": "minimax",
minimax: "lmstudio",
"opencode-zen": "opencode",
"xai-api-key": "xai",
"qwen-portal": "qwen-portal",
"minimax-portal": "minimax-portal",
"qianfan-api-key": "qianfan",

View File

@@ -193,6 +193,60 @@ describe("applyAuthChoice", () => {
expect(parsed.profiles?.["synthetic:default"]?.key).toBe("sk-synthetic-test");
});
it("does not override the global default model when selecting xai-api-key without setDefaultModel", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;
process.env.OPENCLAW_AGENT_DIR = path.join(tempStateDir, "agent");
process.env.PI_CODING_AGENT_DIR = process.env.OPENCLAW_AGENT_DIR;
const text = vi.fn().mockResolvedValue("sk-xai-test");
const select: WizardPrompter["select"] = vi.fn(
async (params) => params.options[0]?.value as never,
);
const multiselect: WizardPrompter["multiselect"] = vi.fn(async () => []);
const prompter: WizardPrompter = {
intro: vi.fn(noopAsync),
outro: vi.fn(noopAsync),
note: vi.fn(noopAsync),
select,
multiselect,
text,
confirm: vi.fn(async () => false),
progress: vi.fn(() => ({ update: noop, stop: noop })),
};
const runtime: RuntimeEnv = {
log: vi.fn(),
error: vi.fn(),
exit: vi.fn((code: number) => {
throw new Error(`exit:${code}`);
}),
};
const result = await applyAuthChoice({
authChoice: "xai-api-key",
config: { agents: { defaults: { model: { primary: "openai/gpt-4o-mini" } } } },
prompter,
runtime,
setDefaultModel: false,
agentId: "agent-1",
});
expect(text).toHaveBeenCalledWith(expect.objectContaining({ message: "Enter xAI API key" }));
expect(result.config.auth?.profiles?.["xai:default"]).toMatchObject({
provider: "xai",
mode: "api_key",
});
expect(result.config.agents?.defaults?.model?.primary).toBe("openai/gpt-4o-mini");
expect(result.agentModelOverride).toBe("xai/grok-4");
const authProfilePath = authProfilePathFor(requireAgentDir());
const raw = await fs.readFile(authProfilePath, "utf8");
const parsed = JSON.parse(raw) as {
profiles?: Record<string, { key?: string }>;
};
expect(parsed.profiles?.["xai:default"]?.key).toBe("sk-xai-test");
});
it("sets default model when selecting github-copilot", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;
@@ -284,7 +338,7 @@ describe("applyAuthChoice", () => {
);
expect(result.config.agents?.defaults?.model?.primary).toBe("anthropic/claude-opus-4-5");
expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
expect(result.agentModelOverride).toBe("opencode/claude-opus-4-5");
expect(result.agentModelOverride).toBe("opencode/claude-opus-4-6");
});
it("uses existing OPENROUTER_API_KEY when selecting openrouter-api-key", async () => {
@@ -398,7 +452,7 @@ describe("applyAuthChoice", () => {
mode: "api_key",
});
expect(result.config.agents?.defaults?.model?.primary).toBe(
"vercel-ai-gateway/anthropic/claude-opus-4.5",
"vercel-ai-gateway/anthropic/claude-opus-4.6",
);
const authProfilePath = authProfilePathFor(requireAgentDir());

View File

@@ -15,6 +15,7 @@ import {
type GatewayAuthChoice = "token" | "password";
const ANTHROPIC_OAUTH_MODEL_KEYS = [
"anthropic/claude-opus-4-6",
"anthropic/claude-opus-4-5",
"anthropic/claude-sonnet-4-5",
"anthropic/claude-haiku-4-5",
@@ -81,7 +82,7 @@ export async function promptAuthConfig(
config: next,
prompter,
allowedKeys: anthropicOAuth ? ANTHROPIC_OAUTH_MODEL_KEYS : undefined,
initialSelections: anthropicOAuth ? ["anthropic/claude-opus-4-5"] : undefined,
initialSelections: anthropicOAuth ? ["anthropic/claude-opus-4-6"] : undefined,
message: anthropicOAuth ? "Anthropic OAuth models" : undefined,
});
if (allowlistSelection.models) {

View File

@@ -83,8 +83,8 @@ describe("dashboardCommand", () => {
customBindHost: undefined,
basePath: undefined,
});
expect(mocks.copyToClipboard).toHaveBeenCalledWith("http://127.0.0.1:18789/?token=abc123");
expect(mocks.openUrl).toHaveBeenCalledWith("http://127.0.0.1:18789/?token=abc123");
expect(mocks.copyToClipboard).toHaveBeenCalledWith("http://127.0.0.1:18789/");
expect(mocks.openUrl).toHaveBeenCalledWith("http://127.0.0.1:18789/");
expect(runtime.log).toHaveBeenCalledWith(
"Opened in your browser. Keep that tab to control OpenClaw.",
);

View File

@@ -23,7 +23,6 @@ export async function dashboardCommand(
const bind = cfg.gateway?.bind ?? "loopback";
const basePath = cfg.gateway?.controlUi?.basePath;
const customBindHost = cfg.gateway?.customBindHost;
const token = cfg.gateway?.auth?.token ?? process.env.OPENCLAW_GATEWAY_TOKEN ?? "";
const links = resolveControlUiLinks({
port,
@@ -31,11 +30,11 @@ export async function dashboardCommand(
customBindHost,
basePath,
});
const authedUrl = token ? `${links.httpUrl}?token=${encodeURIComponent(token)}` : links.httpUrl;
const dashboardUrl = links.httpUrl;
runtime.log(`Dashboard URL: ${authedUrl}`);
runtime.log(`Dashboard URL: ${dashboardUrl}`);
const copied = await copyToClipboard(authedUrl).catch(() => false);
const copied = await copyToClipboard(dashboardUrl).catch(() => false);
runtime.log(copied ? "Copied to clipboard." : "Copy to clipboard unavailable.");
let opened = false;
@@ -43,13 +42,12 @@ export async function dashboardCommand(
if (!options.noOpen) {
const browserSupport = await detectBrowserOpenSupport();
if (browserSupport.ok) {
opened = await openUrl(authedUrl);
opened = await openUrl(dashboardUrl);
}
if (!opened) {
hint = formatControlUiSshHint({
port,
basePath,
token: token || undefined,
});
}
} else {

View File

@@ -2,6 +2,10 @@ import fs from "node:fs/promises";
import path from "node:path";
import type { RuntimeEnv } from "../runtime.js";
import type { DoctorPrompter } from "./doctor-prompter.js";
import {
resolveControlUiDistIndexHealth,
resolveControlUiDistIndexPathForRoot,
} from "../infra/control-ui-assets.js";
import { resolveOpenClawPackageRoot } from "../infra/openclaw-root.js";
import { runCommandWithTimeout } from "../process/exec.js";
import { note } from "../terminal/note.js";
@@ -21,7 +25,11 @@ export async function maybeRepairUiProtocolFreshness(
}
const schemaPath = path.join(root, "src/gateway/protocol/schema.ts");
const uiIndexPath = path.join(root, "dist/control-ui/index.html");
const uiHealth = await resolveControlUiDistIndexHealth({
root,
argv1: process.argv[1],
});
const uiIndexPath = uiHealth.indexPath ?? resolveControlUiDistIndexPathForRoot(root);
try {
const [schemaStats, uiStats] = await Promise.all([

View File

@@ -0,0 +1,41 @@
import type { OpenClawConfig } from "../config/config.js";
import { DEFAULT_PROVIDER } from "../agents/defaults.js";
import { resolveAllowlistModelKey } from "../agents/model-selection.js";
/**
 * Guarantees that the given model ref — and its canonical allowlist key,
 * when one resolves — has an entry under agents.defaults.models, without
 * clobbering any per-model settings that already exist.
 * Returns the config untouched when the ref is blank.
 */
export function ensureModelAllowlistEntry(params: {
  cfg: OpenClawConfig;
  modelRef: string;
  defaultProvider?: string;
}): OpenClawConfig {
  const modelRef = params.modelRef.trim();
  if (!modelRef) {
    return params.cfg;
  }
  const provider = params.defaultProvider ?? DEFAULT_PROVIDER;
  const canonicalKey = resolveAllowlistModelKey(modelRef, provider);
  // Write under both spellings; skip the canonical key when it is absent or
  // identical to the raw ref.
  const keys =
    canonicalKey && canonicalKey !== modelRef ? [modelRef, canonicalKey] : [modelRef];
  const models = { ...params.cfg.agents?.defaults?.models };
  for (const key of keys) {
    // Spreading a missing entry yields {} — i.e. "allowlisted, no overrides".
    models[key] = { ...models[key] };
  }
  return {
    ...params.cfg,
    agents: {
      ...params.cfg.agents,
      defaults: {
        ...params.cfg.agents?.defaults,
        models,
      },
    },
  };
}

View File

@@ -12,6 +12,7 @@ import {
resolveConfiguredModelRef,
} from "../agents/model-selection.js";
import { formatTokenK } from "./models/shared.js";
import { OPENAI_CODEX_DEFAULT_MODEL } from "./openai-codex-model-default.js";
const KEEP_VALUE = "__keep__";
const MANUAL_VALUE = "__manual__";
@@ -331,7 +332,7 @@ export async function promptModelAllowlist(params: {
params.message ??
"Allowlist models (comma-separated provider/model; blank to keep current)",
initialValue: existingKeys.join(", "),
placeholder: "openai-codex/gpt-5.2, anthropic/claude-opus-4-5",
placeholder: `${OPENAI_CODEX_DEFAULT_MODEL}, anthropic/claude-opus-4-6`,
});
const parsed = String(raw ?? "")
.split(",")

View File

@@ -27,17 +27,21 @@ import {
VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
XIAOMI_DEFAULT_MODEL_REF,
ZAI_DEFAULT_MODEL_REF,
XAI_DEFAULT_MODEL_REF,
} from "./onboard-auth.credentials.js";
import {
buildMoonshotModelDefinition,
QIANFAN_BASE_URL,
QIANFAN_DEFAULT_MODEL_REF,
QIANFAN_DEFAULT_MODEL_ID,
buildXaiModelDefinition,
KIMI_CODING_MODEL_REF,
MOONSHOT_BASE_URL,
MOONSHOT_CN_BASE_URL,
MOONSHOT_DEFAULT_MODEL_ID,
MOONSHOT_DEFAULT_MODEL_REF,
XAI_BASE_URL,
XAI_DEFAULT_MODEL_ID,
} from "./onboard-auth.models.js";
export function applyZaiConfig(cfg: OpenClawConfig): OpenClawConfig {
@@ -596,6 +600,71 @@ export function applyVeniceConfig(cfg: OpenClawConfig): OpenClawConfig {
};
}
/**
 * Registers the xAI provider in the config: seeds a "Grok" alias for the
 * default model, merges the built-in Grok definition into the provider's
 * model list, and pins the base URL / wire protocol to the canonical xAI
 * endpoint while preserving any user-declared models and API key.
 */
export function applyXaiProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  // Keep a user-set alias if present; otherwise default to "Grok".
  const models = { ...cfg.agents?.defaults?.models };
  const defaultEntry = models[XAI_DEFAULT_MODEL_REF];
  models[XAI_DEFAULT_MODEL_REF] = {
    ...defaultEntry,
    alias: defaultEntry?.alias ?? "Grok",
  };

  const providers = { ...cfg.models?.providers };
  const existingProvider = providers.xai;
  const defaultModel = buildXaiModelDefinition();
  const existingModels = Array.isArray(existingProvider?.models)
    ? existingProvider.models
    : [];
  // Append the built-in definition only when the user has not already
  // declared a model with the same id.
  const mergedModels = existingModels.some((model) => model.id === XAI_DEFAULT_MODEL_ID)
    ? existingModels
    : [...existingModels, defaultModel];

  // Pull the apiKey out of the existing provider so it can be re-added in a
  // normalized (trimmed) form; all other provider fields carry over as-is.
  const { apiKey: existingApiKey, ...providerRest } = (existingProvider ?? {}) as Record<
    string,
    unknown
  > as { apiKey?: string };
  const trimmedApiKey =
    typeof existingApiKey === "string" ? existingApiKey.trim() : undefined;

  providers.xai = {
    ...providerRest,
    baseUrl: XAI_BASE_URL,
    api: "openai-completions",
    ...(trimmedApiKey ? { apiKey: trimmedApiKey } : {}),
    models: mergedModels.length > 0 ? mergedModels : [defaultModel],
  };

  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models,
      },
    },
    models: {
      mode: cfg.models?.mode ?? "merge",
      providers,
    },
  };
}
/**
 * Installs the xAI provider, then makes Grok the primary default model.
 * Only an explicitly-present `fallbacks` field survives from the previous
 * model selection; every other model field is intentionally reset.
 */
export function applyXaiConfig(cfg: OpenClawConfig): OpenClawConfig {
  const next = applyXaiProviderConfig(cfg);
  const previousModel = next.agents?.defaults?.model;
  const keepFallbacks =
    previousModel != null && "fallbacks" in (previousModel as Record<string, unknown>);
  const model = keepFallbacks
    ? {
        fallbacks: (previousModel as { fallbacks?: string[] }).fallbacks,
        primary: XAI_DEFAULT_MODEL_REF,
      }
    : { primary: XAI_DEFAULT_MODEL_REF };
  return {
    ...next,
    agents: {
      ...next.agents,
      defaults: {
        ...next.agents?.defaults,
        model,
      },
    },
  };
}
export function applyAuthProfileConfig(
cfg: OpenClawConfig,
params: {

View File

@@ -14,9 +14,9 @@ import {
export function applyMinimaxProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
const models = { ...cfg.agents?.defaults?.models };
models["anthropic/claude-opus-4-5"] = {
...models["anthropic/claude-opus-4-5"],
alias: models["anthropic/claude-opus-4-5"]?.alias ?? "Opus",
models["anthropic/claude-opus-4-6"] = {
...models["anthropic/claude-opus-4-6"],
alias: models["anthropic/claude-opus-4-6"]?.alias ?? "Opus",
};
models["lmstudio/minimax-m2.1-gs32"] = {
...models["lmstudio/minimax-m2.1-gs32"],

View File

@@ -2,6 +2,7 @@ import type { OAuthCredentials } from "@mariozechner/pi-ai";
import { resolveOpenClawAgentDir } from "../agents/agent-paths.js";
import { upsertAuthProfile } from "../agents/auth-profiles.js";
export { CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF } from "../agents/cloudflare-ai-gateway.js";
export { XAI_DEFAULT_MODEL_REF } from "./onboard-auth.models.js";
const resolveAuthAgentDir = (agentDir?: string) => agentDir ?? resolveOpenClawAgentDir();
@@ -117,7 +118,7 @@ export async function setVeniceApiKey(key: string, agentDir?: string) {
export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
export const XIAOMI_DEFAULT_MODEL_REF = "xiaomi/mimo-v2-flash";
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.5";
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.6";
export async function setZaiApiKey(key: string, agentDir?: string) {
// Write to resolved agent dir so gateway finds credentials on startup.
@@ -211,6 +212,17 @@ export function setQianfanApiKey(key: string, agentDir?: string) {
type: "api_key",
provider: "qianfan",
key,
},
agentDir: resolveAuthAgentDir(agentDir),
});
}
export function setXaiApiKey(key: string, agentDir?: string) {
upsertAuthProfile({
profileId: "xai:default",
credential: {
type: "api_key",
provider: "xai",
key,
},
agentDir: resolveAuthAgentDir(agentDir),
});

View File

@@ -116,3 +116,26 @@ export function buildQianfanModelDefinition(): ModelDefinitionConfig {
maxTokens: QIANFAN_DEFAULT_MAX_TOKENS,
};
}
export const XAI_BASE_URL = "https://api.x.ai/v1";
export const XAI_DEFAULT_MODEL_ID = "grok-4";
export const XAI_DEFAULT_MODEL_REF = `xai/${XAI_DEFAULT_MODEL_ID}`;
export const XAI_DEFAULT_CONTEXT_WINDOW = 131072;
export const XAI_DEFAULT_MAX_TOKENS = 8192;
export const XAI_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
export function buildXaiModelDefinition(): ModelDefinitionConfig {
return {
id: XAI_DEFAULT_MODEL_ID,
name: "Grok 4",
reasoning: false,
input: ["text"],
cost: XAI_DEFAULT_COST,
contextWindow: XAI_DEFAULT_CONTEXT_WINDOW,
maxTokens: XAI_DEFAULT_MAX_TOKENS,
};
}

View File

@@ -13,11 +13,14 @@ import {
applyOpenrouterProviderConfig,
applySyntheticConfig,
applySyntheticProviderConfig,
applyXaiConfig,
applyXaiProviderConfig,
applyXiaomiConfig,
applyXiaomiProviderConfig,
OPENROUTER_DEFAULT_MODEL_REF,
SYNTHETIC_DEFAULT_MODEL_ID,
SYNTHETIC_DEFAULT_MODEL_REF,
XAI_DEFAULT_MODEL_REF,
setMinimaxApiKey,
writeOAuthCredentials,
} from "./onboard-auth.js";
@@ -389,11 +392,70 @@ describe("applyXiaomiConfig", () => {
});
});
describe("applyXaiConfig", () => {
it("adds xAI provider with correct settings", () => {
const cfg = applyXaiConfig({});
expect(cfg.models?.providers?.xai).toMatchObject({
baseUrl: "https://api.x.ai/v1",
api: "openai-completions",
});
expect(cfg.agents?.defaults?.model?.primary).toBe(XAI_DEFAULT_MODEL_REF);
});
it("preserves existing model fallbacks", () => {
const cfg = applyXaiConfig({
agents: {
defaults: {
model: { fallbacks: ["anthropic/claude-opus-4-5"] },
},
},
});
expect(cfg.agents?.defaults?.model?.fallbacks).toEqual(["anthropic/claude-opus-4-5"]);
});
});
describe("applyXaiProviderConfig", () => {
it("adds model alias", () => {
const cfg = applyXaiProviderConfig({});
expect(cfg.agents?.defaults?.models?.[XAI_DEFAULT_MODEL_REF]?.alias).toBe("Grok");
});
it("merges xAI models and keeps existing provider overrides", () => {
const cfg = applyXaiProviderConfig({
models: {
providers: {
xai: {
baseUrl: "https://old.example.com",
apiKey: "old-key",
api: "anthropic-messages",
models: [
{
id: "custom-model",
name: "Custom",
reasoning: false,
input: ["text"],
cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1000,
maxTokens: 100,
},
],
},
},
},
});
expect(cfg.models?.providers?.xai?.baseUrl).toBe("https://api.x.ai/v1");
expect(cfg.models?.providers?.xai?.api).toBe("openai-completions");
expect(cfg.models?.providers?.xai?.apiKey).toBe("old-key");
expect(cfg.models?.providers?.xai?.models.map((m) => m.id)).toEqual(["custom-model", "grok-4"]);
});
});
describe("applyOpencodeZenProviderConfig", () => {
it("adds allowlist entry for the default model", () => {
const cfg = applyOpencodeZenProviderConfig({});
const models = cfg.agents?.defaults?.models ?? {};
expect(Object.keys(models)).toContain("opencode/claude-opus-4-5");
expect(Object.keys(models)).toContain("opencode/claude-opus-4-6");
});
it("preserves existing alias for the default model", () => {
@@ -401,19 +463,19 @@ describe("applyOpencodeZenProviderConfig", () => {
agents: {
defaults: {
models: {
"opencode/claude-opus-4-5": { alias: "My Opus" },
"opencode/claude-opus-4-6": { alias: "My Opus" },
},
},
},
});
expect(cfg.agents?.defaults?.models?.["opencode/claude-opus-4-5"]?.alias).toBe("My Opus");
expect(cfg.agents?.defaults?.models?.["opencode/claude-opus-4-6"]?.alias).toBe("My Opus");
});
});
describe("applyOpencodeZenConfig", () => {
it("sets correct primary model", () => {
const cfg = applyOpencodeZenConfig({});
expect(cfg.agents?.defaults?.model?.primary).toBe("opencode/claude-opus-4-5");
expect(cfg.agents?.defaults?.model?.primary).toBe("opencode/claude-opus-4-6");
});
it("preserves existing model fallbacks", () => {

View File

@@ -26,6 +26,8 @@ export {
applyXiaomiConfig,
applyXiaomiProviderConfig,
applyZaiConfig,
applyXaiConfig,
applyXaiProviderConfig,
} from "./onboard-auth.config-core.js";
export {
applyMinimaxApiConfig,
@@ -57,10 +59,12 @@ export {
setVercelAiGatewayApiKey,
setXiaomiApiKey,
setZaiApiKey,
setXaiApiKey,
writeOAuthCredentials,
VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
XIAOMI_DEFAULT_MODEL_REF,
ZAI_DEFAULT_MODEL_REF,
XAI_DEFAULT_MODEL_REF,
} from "./onboard-auth.credentials.js";
export {
buildQianfanModelDefinition,

View File

@@ -179,23 +179,16 @@ export async function detectBrowserOpenSupport(): Promise<BrowserOpenSupport> {
return { ok: true, command: resolved.command };
}
export function formatControlUiSshHint(params: {
port: number;
basePath?: string;
token?: string;
}): string {
export function formatControlUiSshHint(params: { port: number; basePath?: string }): string {
const basePath = normalizeControlUiBasePath(params.basePath);
const uiPath = basePath ? `${basePath}/` : "/";
const localUrl = `http://localhost:${params.port}${uiPath}`;
const tokenParam = params.token ? `?token=${encodeURIComponent(params.token)}` : "";
const authedUrl = params.token ? `${localUrl}${tokenParam}` : undefined;
const sshTarget = resolveSshTargetHint();
return [
"No GUI detected. Open from your computer:",
`ssh -N -L ${params.port}:127.0.0.1:${params.port} ${sshTarget}`,
"Then open:",
localUrl,
authedUrl,
"Docs:",
"https://docs.openclaw.ai/gateway/remote",
"https://docs.openclaw.ai/web/control-ui",

View File

@@ -66,7 +66,7 @@ describe("onboard (non-interactive): Vercel AI Gateway", () => {
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.provider).toBe("vercel-ai-gateway");
expect(cfg.auth?.profiles?.["vercel-ai-gateway:default"]?.mode).toBe("api_key");
expect(cfg.agents?.defaults?.model?.primary).toBe(
"vercel-ai-gateway/anthropic/claude-opus-4.5",
"vercel-ai-gateway/anthropic/claude-opus-4.6",
);
const { ensureAuthProfileStore } = await import("../agents/auth-profiles.js");

View File

@@ -0,0 +1,77 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it, vi } from "vitest";
import { OPENAI_DEFAULT_MODEL } from "./openai-model-default.js";
describe("onboard (non-interactive): OpenAI API key", () => {
it("stores OPENAI_API_KEY and configures the OpenAI default model", async () => {
const prev = {
home: process.env.HOME,
stateDir: process.env.OPENCLAW_STATE_DIR,
configPath: process.env.OPENCLAW_CONFIG_PATH,
skipChannels: process.env.OPENCLAW_SKIP_CHANNELS,
skipGmail: process.env.OPENCLAW_SKIP_GMAIL_WATCHER,
skipCron: process.env.OPENCLAW_SKIP_CRON,
skipCanvas: process.env.OPENCLAW_SKIP_CANVAS_HOST,
token: process.env.OPENCLAW_GATEWAY_TOKEN,
password: process.env.OPENCLAW_GATEWAY_PASSWORD,
};
process.env.OPENCLAW_SKIP_CHANNELS = "1";
process.env.OPENCLAW_SKIP_GMAIL_WATCHER = "1";
process.env.OPENCLAW_SKIP_CRON = "1";
process.env.OPENCLAW_SKIP_CANVAS_HOST = "1";
delete process.env.OPENCLAW_GATEWAY_TOKEN;
delete process.env.OPENCLAW_GATEWAY_PASSWORD;
const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-onboard-openai-"));
process.env.HOME = tempHome;
process.env.OPENCLAW_STATE_DIR = tempHome;
process.env.OPENCLAW_CONFIG_PATH = path.join(tempHome, "openclaw.json");
vi.resetModules();
const runtime = {
log: () => {},
error: (msg: string) => {
throw new Error(msg);
},
exit: (code: number) => {
throw new Error(`exit:${code}`);
},
};
try {
const { runNonInteractiveOnboarding } = await import("./onboard-non-interactive.js");
await runNonInteractiveOnboarding(
{
nonInteractive: true,
authChoice: "openai-api-key",
openaiApiKey: "sk-openai-test",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
);
const { CONFIG_PATH } = await import("../config/config.js");
const cfg = JSON.parse(await fs.readFile(CONFIG_PATH, "utf8")) as {
agents?: { defaults?: { model?: { primary?: string } } };
};
expect(cfg.agents?.defaults?.model?.primary).toBe(OPENAI_DEFAULT_MODEL);
} finally {
await fs.rm(tempHome, { recursive: true, force: true });
process.env.HOME = prev.home;
process.env.OPENCLAW_STATE_DIR = prev.stateDir;
process.env.OPENCLAW_CONFIG_PATH = prev.configPath;
process.env.OPENCLAW_SKIP_CHANNELS = prev.skipChannels;
process.env.OPENCLAW_SKIP_GMAIL_WATCHER = prev.skipGmail;
process.env.OPENCLAW_SKIP_CRON = prev.skipCron;
process.env.OPENCLAW_SKIP_CANVAS_HOST = prev.skipCanvas;
process.env.OPENCLAW_GATEWAY_TOKEN = prev.token;
process.env.OPENCLAW_GATEWAY_PASSWORD = prev.password;
}
}, 60_000);
});

View File

@@ -0,0 +1,91 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it, vi } from "vitest";
describe("onboard (non-interactive): xAI", () => {
  it("stores the API key and configures the default model", async () => {
    // Every env var this test mutates, snapshotted up front and restored in `finally`.
    const touchedEnvKeys = [
      "HOME",
      "OPENCLAW_STATE_DIR",
      "OPENCLAW_CONFIG_PATH",
      "OPENCLAW_SKIP_CHANNELS",
      "OPENCLAW_SKIP_GMAIL_WATCHER",
      "OPENCLAW_SKIP_CRON",
      "OPENCLAW_SKIP_CANVAS_HOST",
      "OPENCLAW_GATEWAY_TOKEN",
      "OPENCLAW_GATEWAY_PASSWORD",
    ] as const;
    const snapshot = new Map<string, string | undefined>(
      touchedEnvKeys.map((key) => [key, process.env[key]]),
    );
    // Disable background subsystems so onboarding stays hermetic.
    process.env.OPENCLAW_SKIP_CHANNELS = "1";
    process.env.OPENCLAW_SKIP_GMAIL_WATCHER = "1";
    process.env.OPENCLAW_SKIP_CRON = "1";
    process.env.OPENCLAW_SKIP_CANVAS_HOST = "1";
    delete process.env.OPENCLAW_GATEWAY_TOKEN;
    delete process.env.OPENCLAW_GATEWAY_PASSWORD;
    // Sandbox HOME/state/config into a throwaway temp dir.
    const sandboxHome = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-onboard-xai-"));
    process.env.HOME = sandboxHome;
    process.env.OPENCLAW_STATE_DIR = sandboxHome;
    process.env.OPENCLAW_CONFIG_PATH = path.join(sandboxHome, "openclaw.json");
    // Force fresh module state so config/auth modules re-read the env above.
    vi.resetModules();
    const hostRuntime = {
      log: () => {},
      error: (msg: string) => {
        throw new Error(msg);
      },
      exit: (code: number) => {
        throw new Error(`exit:${code}`);
      },
    };
    try {
      const { runNonInteractiveOnboarding } = await import("./onboard-non-interactive.js");
      await runNonInteractiveOnboarding(
        {
          nonInteractive: true,
          authChoice: "xai-api-key",
          xaiApiKey: "xai-test-key",
          skipHealth: true,
          skipChannels: true,
          skipSkills: true,
          json: true,
        },
        hostRuntime,
      );
      // The written config must register the xAI auth profile and point the
      // default agent model at Grok.
      const { CONFIG_PATH } = await import("../config/config.js");
      const parsedConfig = JSON.parse(await fs.readFile(CONFIG_PATH, "utf8")) as {
        auth?: {
          profiles?: Record<string, { provider?: string; mode?: string }>;
        };
        agents?: { defaults?: { model?: { primary?: string } } };
      };
      expect(parsedConfig.auth?.profiles?.["xai:default"]?.provider).toBe("xai");
      expect(parsedConfig.auth?.profiles?.["xai:default"]?.mode).toBe("api_key");
      expect(parsedConfig.agents?.defaults?.model?.primary).toBe("xai/grok-4");
      // The secret itself lands in the auth-profile store, not the config file.
      const { ensureAuthProfileStore } = await import("../agents/auth-profiles.js");
      const profileStore = ensureAuthProfileStore();
      const storedProfile = profileStore.profiles["xai:default"];
      expect(storedProfile?.type).toBe("api_key");
      if (storedProfile?.type === "api_key") {
        expect(storedProfile.provider).toBe("xai");
        expect(storedProfile.key).toBe("xai-test-key");
      }
    } finally {
      await fs.rm(sandboxHome, { recursive: true, force: true });
      for (const key of touchedEnvKeys) {
        process.env[key] = snapshot.get(key);
      }
    }
  }, 60_000);
});

View File

@@ -22,6 +22,7 @@ type AuthChoiceFlagOptions = Pick<
| "xiaomiApiKey"
| "minimaxApiKey"
| "opencodeZenApiKey"
| "xaiApiKey"
>;
const AUTH_CHOICE_FLAG_MAP = [
@@ -41,6 +42,7 @@ const AUTH_CHOICE_FLAG_MAP = [
{ flag: "veniceApiKey", authChoice: "venice-api-key", label: "--venice-api-key" },
{ flag: "zaiApiKey", authChoice: "zai-api-key", label: "--zai-api-key" },
{ flag: "xiaomiApiKey", authChoice: "xiaomi-api-key", label: "--xiaomi-api-key" },
{ flag: "xaiApiKey", authChoice: "xai-api-key", label: "--xai-api-key" },
{ flag: "minimaxApiKey", authChoice: "minimax-api", label: "--minimax-api-key" },
{ flag: "opencodeZenApiKey", authChoice: "opencode-zen", label: "--opencode-zen-api-key" },
] satisfies ReadonlyArray<AuthChoiceFlag>;

View File

@@ -22,6 +22,7 @@ import {
applySyntheticConfig,
applyVeniceConfig,
applyVercelAiGatewayConfig,
applyXaiConfig,
applyXiaomiConfig,
applyZaiConfig,
setAnthropicApiKey,
@@ -34,11 +35,13 @@ import {
setOpencodeZenApiKey,
setOpenrouterApiKey,
setSyntheticApiKey,
setXaiApiKey,
setVeniceApiKey,
setVercelAiGatewayApiKey,
setXiaomiApiKey,
setZaiApiKey,
} from "../../onboard-auth.js";
import { applyOpenAIConfig } from "../../openai-model-default.js";
import { resolveNonInteractiveApiKey } from "../api-keys.js";
export async function applyNonInteractiveAuthChoice(params: {
@@ -226,6 +229,13 @@ export async function applyNonInteractiveAuthChoice(params: {
flagValue: opts.qianfanApiKey,
flagName: "--qianfan-api-key",
envVar: "QIANFAN_API_KEY",
if (authChoice === "xai-api-key") {
const resolved = await resolveNonInteractiveApiKey({
provider: "xai",
cfg: baseConfig,
flagValue: opts.xaiApiKey,
flagName: "--xai-api-key",
envVar: "XAI_API_KEY",
runtime,
});
if (!resolved) {
@@ -240,6 +250,14 @@ export async function applyNonInteractiveAuthChoice(params: {
mode: "api_key",
});
return applyQianfanConfig(nextConfig);
setXaiApiKey(resolved.key);
}
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "xai:default",
provider: "xai",
mode: "api_key",
});
return applyXaiConfig(nextConfig);
}
if (authChoice === "openai-api-key") {
@@ -259,7 +277,7 @@ export async function applyNonInteractiveAuthChoice(params: {
const result = upsertSharedEnvVar({ key: "OPENAI_API_KEY", value: key });
process.env.OPENAI_API_KEY = key;
runtime.log(`Saved OPENAI_API_KEY to ${shortenHomePath(result.path)}`);
return nextConfig;
return applyOpenAIConfig(nextConfig);
}
if (authChoice === "openrouter-api-key") {

View File

@@ -155,22 +155,29 @@ export async function setupSkills(
installId,
config: next,
});
const warnings = result.warnings ?? [];
if (result.ok) {
spin.stop(`Installed ${name}`);
} else {
const code = result.code == null ? "" : ` (exit ${result.code})`;
const detail = summarizeInstallFailure(result.message);
spin.stop(`Install failed: ${name}${code}${detail ? `${detail}` : ""}`);
if (result.stderr) {
runtime.log(result.stderr.trim());
} else if (result.stdout) {
runtime.log(result.stdout.trim());
spin.stop(warnings.length > 0 ? `Installed ${name} (with warnings)` : `Installed ${name}`);
for (const warning of warnings) {
runtime.log(warning);
}
runtime.log(
`Tip: run \`${formatCliCommand("openclaw doctor")}\` to review skills + requirements.`,
);
runtime.log("Docs: https://docs.openclaw.ai/skills");
continue;
}
const code = result.code == null ? "" : ` (exit ${result.code})`;
const detail = summarizeInstallFailure(result.message);
spin.stop(`Install failed: ${name}${code}${detail ? `${detail}` : ""}`);
for (const warning of warnings) {
runtime.log(warning);
}
if (result.stderr) {
runtime.log(result.stderr.trim());
} else if (result.stdout) {
runtime.log(result.stdout.trim());
}
runtime.log(
`Tip: run \`${formatCliCommand("openclaw doctor")}\` to review skills + requirements.`,
);
runtime.log("Docs: https://docs.openclaw.ai/skills");
}
}

View File

@@ -36,6 +36,7 @@ export type AuthChoice =
| "copilot-proxy"
| "qwen-portal"
| "qianfan-api-key"
| "xai-api-key"
| "skip";
export type GatewayAuthChoice = "token" | "password";
export type ResetScope = "config" | "config+creds+sessions" | "full";
@@ -81,6 +82,7 @@ export type OnboardOptions = {
veniceApiKey?: string;
opencodeZenApiKey?: string;
qianfanApiKey?: string;
xaiApiKey?: string;
gatewayPort?: number;
gatewayBind?: GatewayBind;
gatewayAuth?: GatewayAuthChoice;

View File

@@ -4,6 +4,7 @@ import {
applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js";
import { OPENAI_DEFAULT_MODEL } from "./openai-model-default.js";
describe("applyOpenAICodexModelDefault", () => {
it("sets openai-codex default when model is unset", () => {
@@ -17,7 +18,7 @@ describe("applyOpenAICodexModelDefault", () => {
it("sets openai-codex default when model is openai/*", () => {
const cfg: OpenClawConfig = {
agents: { defaults: { model: "openai/gpt-5.2" } },
agents: { defaults: { model: OPENAI_DEFAULT_MODEL } },
};
const applied = applyOpenAICodexModelDefault(cfg);
expect(applied.changed).toBe(true);
@@ -28,7 +29,7 @@ describe("applyOpenAICodexModelDefault", () => {
it("does not override openai-codex/*", () => {
const cfg: OpenClawConfig = {
agents: { defaults: { model: "openai-codex/gpt-5.2" } },
agents: { defaults: { model: OPENAI_CODEX_DEFAULT_MODEL } },
};
const applied = applyOpenAICodexModelDefault(cfg);
expect(applied.changed).toBe(false);

View File

@@ -1,7 +1,7 @@
import type { OpenClawConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js";
export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.2";
export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.3-codex";
function shouldSetOpenAICodexModel(model?: string): boolean {
const trimmed = model?.trim();

View File

@@ -0,0 +1,40 @@
import { describe, expect, it } from "vitest";
import {
applyOpenAIConfig,
applyOpenAIProviderConfig,
OPENAI_DEFAULT_MODEL,
} from "./openai-model-default.js";
describe("applyOpenAIProviderConfig", () => {
  it("adds allowlist entry for default model", () => {
    const result = applyOpenAIProviderConfig({});
    const modelKeys = Object.keys(result.agents?.defaults?.models ?? {});
    expect(modelKeys).toContain(OPENAI_DEFAULT_MODEL);
  });

  it("preserves existing alias for default model", () => {
    // Seed a config where the default model already carries a custom alias.
    const seeded = {
      agents: {
        defaults: {
          models: { [OPENAI_DEFAULT_MODEL]: { alias: "My GPT" } },
        },
      },
    };
    const result = applyOpenAIProviderConfig(seeded);
    expect(result.agents?.defaults?.models?.[OPENAI_DEFAULT_MODEL]?.alias).toBe("My GPT");
  });
});
describe("applyOpenAIConfig", () => {
  it("sets default when model is unset", () => {
    const result = applyOpenAIConfig({});
    expect(result.agents?.defaults?.model).toEqual({ primary: OPENAI_DEFAULT_MODEL });
  });

  it("overrides model.primary when model object already exists", () => {
    // Existing object-form model configs keep their extra fields; only
    // `primary` is swapped to the OpenAI default.
    const seeded = {
      agents: { defaults: { model: { primary: "anthropic/claude-opus-4-6", fallback: [] } } },
    };
    const result = applyOpenAIConfig(seeded);
    expect(result.agents?.defaults?.model).toEqual({
      primary: OPENAI_DEFAULT_MODEL,
      fallback: [],
    });
  });
});

View File

@@ -0,0 +1,47 @@
import type { OpenClawConfig } from "../config/config.js";
import { ensureModelAllowlistEntry } from "./model-allowlist.js";
export const OPENAI_DEFAULT_MODEL = "openai/gpt-5.1-codex";
/**
 * Ensures the OpenAI default model has an allowlist entry and a
 * human-readable alias. A caller-provided alias is preserved; otherwise the
 * stock label "GPT" is applied. Returns a new config object (input untouched).
 */
export function applyOpenAIProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  const withAllowlist = ensureModelAllowlistEntry({
    cfg,
    modelRef: OPENAI_DEFAULT_MODEL,
  });
  const existingModels = withAllowlist.agents?.defaults?.models;
  const existingEntry = existingModels?.[OPENAI_DEFAULT_MODEL];
  return {
    ...withAllowlist,
    agents: {
      ...withAllowlist.agents,
      defaults: {
        ...withAllowlist.agents?.defaults,
        models: {
          ...existingModels,
          [OPENAI_DEFAULT_MODEL]: {
            ...existingEntry,
            // Keep an existing alias; fall back to the default label.
            alias: existingEntry?.alias ?? "GPT",
          },
        },
      },
    },
  };
}
/**
 * Applies the OpenAI provider defaults and forces the primary agent model to
 * OPENAI_DEFAULT_MODEL. Object-form model configs keep their other fields
 * (e.g. fallback list) and only have `primary` replaced; string-form or
 * missing configs are replaced wholesale with `{ primary }`.
 */
export function applyOpenAIConfig(cfg: OpenClawConfig): OpenClawConfig {
  const base = applyOpenAIProviderConfig(cfg);
  const currentModel = base.agents?.defaults?.model;
  const model =
    currentModel && typeof currentModel === "object"
      ? { ...currentModel, primary: OPENAI_DEFAULT_MODEL }
      : { primary: OPENAI_DEFAULT_MODEL };
  return {
    ...base,
    agents: {
      ...base.agents,
      defaults: {
        ...base.agents?.defaults,
        model,
      },
    },
  };
}

View File

@@ -1,8 +1,11 @@
import type { OpenClawConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-5";
const LEGACY_OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-6";
const LEGACY_OPENCODE_ZEN_DEFAULT_MODELS = new Set([
"opencode/claude-opus-4-5",
"opencode-zen/claude-opus-4-5",
]);
function resolvePrimaryModel(model?: AgentModelListConfig | string): string | undefined {
if (typeof model === "string") {
@@ -20,7 +23,9 @@ export function applyOpencodeZenModelDefault(cfg: OpenClawConfig): {
} {
const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
const normalizedCurrent =
current === LEGACY_OPENCODE_ZEN_DEFAULT_MODEL ? OPENCODE_ZEN_DEFAULT_MODEL : current;
current && LEGACY_OPENCODE_ZEN_DEFAULT_MODELS.has(current)
? OPENCODE_ZEN_DEFAULT_MODEL
: current;
if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) {
return { next: cfg, changed: false };
}