Auth: land codex oauth onboarding flow (#15406)

This commit is contained in:
Mariano Belinky
2026-02-13 17:18:20 +00:00
parent 7ec60d6449
commit 86e4fe0a7a
6 changed files with 282 additions and 55 deletions

View File

@@ -25,6 +25,7 @@ Docs: https://docs.openclaw.ai
- Auto-reply/Threading: auto-inject implicit reply threading so `replyToMode` works without requiring model-emitted `[[reply_to_current]]`, while preserving `replyToMode: "off"` behavior for implicit Slack replies and keeping block-streaming chunk coalescing stable under `replyToMode: "first"`. (#14976) Thanks @Diaspar4u.
- Sandbox: pass configured `sandbox.docker.env` variables to sandbox containers at `docker create` time. (#15138) Thanks @stevebot-alive.
- Onboarding/CLI: restore terminal state without resuming paused `stdin`, so onboarding exits cleanly after choosing Web UI and the installer returns instead of appearing stuck.
- Auth/OpenAI Codex: share OAuth login handling across onboarding and `models auth login --provider openai-codex`, keep onboarding alive when OAuth fails, and surface a direct OAuth help note instead of terminating the wizard. (#15406, follow-up to #14552) Thanks @zhiluo20.
- Onboarding/Providers: add vLLM as an onboarding provider with model discovery, auth profile wiring, and non-interactive auth-choice validation. (#12577) Thanks @gejifeng.
- Onboarding/Providers: preserve Hugging Face auth intent in auth-choice remapping (`tokenProvider=huggingface` with `authChoice=apiKey`) and skip env-override prompts when an explicit token is provided. (#13472) Thanks @Josephrp.
- OpenAI Codex/Spark: implement end-to-end `gpt-5.3-codex-spark` support across fallback/thinking/model resolution and `models list` forward-compat visibility. (#14990, #15174) Thanks @L-U-C-K-Y, @loiie45e.

View File

@@ -1,4 +1,3 @@
import { loginOpenAICodex } from "@mariozechner/pi-ai";
import type { ApplyAuthChoiceParams, ApplyAuthChoiceResult } from "./auth-choice.apply.js";
import { resolveEnvApiKey } from "../agents/model-auth.js";
import { upsertSharedEnvVar } from "../infra/env-file.js";
@@ -9,13 +8,13 @@ import {
} from "./auth-choice.api-key.js";
import { applyDefaultModelChoice } from "./auth-choice.default-model.js";
import { isRemoteEnvironment } from "./oauth-env.js";
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
import { applyAuthProfileConfig, writeOAuthCredentials } from "./onboard-auth.js";
import { openUrl } from "./onboard-helpers.js";
import {
applyOpenAICodexModelDefault,
OPENAI_CODEX_DEFAULT_MODEL,
} from "./openai-codex-model-default.js";
import { loginOpenAICodexOAuth } from "./openai-codex-oauth.js";
import {
applyOpenAIConfig,
applyOpenAIProviderConfig,
@@ -125,66 +124,42 @@ export async function applyAuthChoiceOpenAI(
);
};
const isRemote = isRemoteEnvironment();
await params.prompter.note(
isRemote
? [
"You are running in a remote/VPS environment.",
"A URL will be shown for you to open in your LOCAL browser.",
"After signing in, paste the redirect URL back here.",
].join("\n")
: [
"Browser will open for OpenAI authentication.",
"If the callback doesn't auto-complete, paste the redirect URL.",
"OpenAI OAuth uses localhost:1455 for the callback.",
].join("\n"),
"OpenAI Codex OAuth",
);
const spin = params.prompter.progress("Starting OAuth flow…");
let creds;
try {
const { onAuth, onPrompt } = createVpsAwareOAuthHandlers({
isRemote,
creds = await loginOpenAICodexOAuth({
prompter: params.prompter,
runtime: params.runtime,
spin,
openUrl,
isRemote: isRemoteEnvironment(),
openUrl: async (url) => {
await openUrl(url);
},
localBrowserMessage: "Complete sign-in in browser…",
});
const creds = await loginOpenAICodex({
onAuth,
onPrompt,
onProgress: (msg) => spin.update(msg),
} catch {
// The helper already surfaces the error to the user.
// Keep onboarding flow alive and return unchanged config.
return { config: nextConfig, agentModelOverride };
}
if (creds) {
await writeOAuthCredentials("openai-codex", creds, params.agentDir);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "openai-codex:default",
provider: "openai-codex",
mode: "oauth",
});
spin.stop("OpenAI OAuth complete");
if (creds) {
await writeOAuthCredentials("openai-codex", creds, params.agentDir);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "openai-codex:default",
provider: "openai-codex",
mode: "oauth",
});
if (params.setDefaultModel) {
const applied = applyOpenAICodexModelDefault(nextConfig);
nextConfig = applied.next;
if (applied.changed) {
await params.prompter.note(
`Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`,
"Model configured",
);
}
} else {
agentModelOverride = OPENAI_CODEX_DEFAULT_MODEL;
await noteAgentModel(OPENAI_CODEX_DEFAULT_MODEL);
if (params.setDefaultModel) {
const applied = applyOpenAICodexModelDefault(nextConfig);
nextConfig = applied.next;
if (applied.changed) {
await params.prompter.note(
`Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`,
"Model configured",
);
}
} else {
agentModelOverride = OPENAI_CODEX_DEFAULT_MODEL;
await noteAgentModel(OPENAI_CODEX_DEFAULT_MODEL);
}
} catch (err) {
spin.stop("OpenAI OAuth failed");
params.runtime.error(String(err));
await params.prompter.note(
"Trouble with OAuth? See https://docs.openclaw.ai/start/faq",
"OAuth help",
);
}
return { config: nextConfig, agentModelOverride };
}

View File

@@ -12,6 +12,11 @@ vi.mock("../providers/github-copilot-auth.js", () => ({
githubCopilotLoginCommand: vi.fn(async () => {}),
}));
const loginOpenAICodexOAuth = vi.hoisted(() => vi.fn(async () => null));
vi.mock("./openai-codex-oauth.js", () => ({
loginOpenAICodexOAuth,
}));
const resolvePluginProviders = vi.hoisted(() => vi.fn(() => []));
vi.mock("../plugins/providers.js", () => ({
resolvePluginProviders,
@@ -46,6 +51,8 @@ describe("applyAuthChoice", () => {
afterEach(async () => {
vi.unstubAllGlobals();
resolvePluginProviders.mockReset();
loginOpenAICodexOAuth.mockReset();
loginOpenAICodexOAuth.mockResolvedValue(null);
if (tempStateDir) {
await fs.rm(tempStateDir, { recursive: true, force: true });
tempStateDir = null;
@@ -112,6 +119,43 @@ describe("applyAuthChoice", () => {
}
});
it("does not throw when openai-codex oauth fails", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;
process.env.OPENCLAW_AGENT_DIR = path.join(tempStateDir, "agent");
process.env.PI_CODING_AGENT_DIR = process.env.OPENCLAW_AGENT_DIR;
loginOpenAICodexOAuth.mockRejectedValueOnce(new Error("oauth failed"));
const prompter: WizardPrompter = {
intro: vi.fn(noopAsync),
outro: vi.fn(noopAsync),
note: vi.fn(noopAsync),
select: vi.fn(async () => "" as never),
multiselect: vi.fn(async () => []),
text: vi.fn(async () => ""),
confirm: vi.fn(async () => false),
progress: vi.fn(() => ({ update: noop, stop: noop })),
};
const runtime: RuntimeEnv = {
log: vi.fn(),
error: vi.fn(),
exit: vi.fn((code: number) => {
throw new Error(`exit:${code}`);
}),
};
await expect(
applyAuthChoice({
authChoice: "openai-codex",
config: {},
prompter,
runtime,
setDefaultModel: false,
}),
).resolves.toEqual({ config: {} });
});
it("prompts and writes MiniMax API key when selecting minimax-api", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;

View File

@@ -26,6 +26,8 @@ import { isRemoteEnvironment } from "../oauth-env.js";
import { createVpsAwareOAuthHandlers } from "../oauth-flow.js";
import { applyAuthProfileConfig } from "../onboard-auth.js";
import { openUrl } from "../onboard-helpers.js";
import { OPENAI_CODEX_DEFAULT_MODEL } from "../openai-codex-model-default.js";
import { loginOpenAICodexOAuth } from "../openai-codex-oauth.js";
import { updateConfig } from "./shared.js";
const confirm = (params: Parameters<typeof clackConfirm>[0]) =>
@@ -342,6 +344,59 @@ export async function modelsAuthLoginCommand(opts: LoginOptions, runtime: Runtim
const workspaceDir =
resolveAgentWorkspaceDir(config, defaultAgentId) ?? resolveDefaultAgentWorkspaceDir();
const prompter = createClackPrompter();
const requestedProvider = opts.provider ? normalizeProviderId(opts.provider) : null;
if (requestedProvider === "openai-codex") {
const method = opts.method?.trim().toLowerCase();
if (method && method !== "oauth") {
throw new Error('OpenAI Codex auth only supports --method "oauth".');
}
const creds = await loginOpenAICodexOAuth({
prompter,
runtime,
isRemote: isRemoteEnvironment(),
openUrl: async (url) => {
await openUrl(url);
},
});
if (!creds) {
return;
}
const profileId = "openai-codex:default";
upsertAuthProfile({
profileId,
credential: {
type: "oauth",
provider: "openai-codex",
...creds,
},
agentDir,
});
await updateConfig((cfg) => {
let next = applyAuthProfileConfig(cfg, {
profileId,
provider: "openai-codex",
mode: "oauth",
});
if (opts.setDefault) {
next = applyDefaultModel(next, OPENAI_CODEX_DEFAULT_MODEL);
}
return next;
});
logConfigUpdated(runtime);
runtime.log(`Auth profile: ${profileId} (openai-codex/oauth)`);
runtime.log(
opts.setDefault
? `Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`
: `Default model available: ${OPENAI_CODEX_DEFAULT_MODEL} (use --set-default to apply)`,
);
return;
}
const providers = resolvePluginProviders({ config, workspaceDir });
if (providers.length === 0) {
throw new Error(
@@ -349,7 +404,6 @@ export async function modelsAuthLoginCommand(opts: LoginOptions, runtime: Runtim
);
}
const prompter = createClackPrompter();
const selectedProvider =
resolveProviderMatch(providers, opts.provider) ??
(await prompter

View File

@@ -0,0 +1,98 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { RuntimeEnv } from "../runtime.js";
import type { WizardPrompter } from "../wizard/prompts.js";
// Hoisted so the mock factories below can reference these spies even though
// vi.mock calls are lifted to the top of the module at transform time.
const mocks = vi.hoisted(() => ({
loginOpenAICodex: vi.fn(),
createVpsAwareOAuthHandlers: vi.fn(),
}));
// Replace the upstream OAuth entry point so tests control success/failure
// without any real network or browser interaction.
vi.mock("@mariozechner/pi-ai", () => ({
loginOpenAICodex: mocks.loginOpenAICodex,
}));
// Stub the VPS-aware handler factory; the tests only assert it is consulted.
vi.mock("./oauth-flow.js", () => ({
createVpsAwareOAuthHandlers: mocks.createVpsAwareOAuthHandlers,
}));
// Imported after the mock declarations so the helper binds to the mocked modules.
import { loginOpenAICodexOAuth } from "./openai-codex-oauth.js";
// Builds a minimal WizardPrompter stub exposing only the members the OAuth
// helper touches (`note` and `progress`), plus the shared spinner object so
// tests can assert on its update/stop calls.
function createPrompter() {
const spinner = { update: vi.fn(), stop: vi.fn() };
const stub: Pick<WizardPrompter, "note" | "progress"> = {
note: vi.fn(async () => {}),
progress: vi.fn(() => spinner),
};
return { prompter: stub as unknown as WizardPrompter, spin: spinner };
}
// Fake RuntimeEnv whose exit() throws, so any accidental process-exit path
// surfaces as a test failure instead of silently ending the run.
function createRuntime(): RuntimeEnv {
const exit = vi.fn((code: number) => {
throw new Error(`exit:${code}`);
});
return { log: vi.fn(), error: vi.fn(), exit };
}
describe("loginOpenAICodexOAuth", () => {
beforeEach(() => {
// Reset call history so each test's assertions see only its own activity.
vi.clearAllMocks();
});
it("returns credentials on successful oauth login", async () => {
// Shape mirrors the OAuthCredentials the upstream login resolves with.
const creds = {
provider: "openai-codex" as const,
access: "access-token",
refresh: "refresh-token",
expires: Date.now() + 60_000,
email: "user@example.com",
};
mocks.createVpsAwareOAuthHandlers.mockReturnValue({
onAuth: vi.fn(),
onPrompt: vi.fn(),
});
mocks.loginOpenAICodex.mockResolvedValue(creds);
const { prompter, spin } = createPrompter();
const runtime = createRuntime();
const result = await loginOpenAICodexOAuth({
prompter,
runtime,
isRemote: false,
openUrl: async () => {},
});
// Credentials pass through untouched and the spinner reports success.
expect(result).toEqual(creds);
expect(mocks.loginOpenAICodex).toHaveBeenCalledOnce();
expect(spin.stop).toHaveBeenCalledWith("OpenAI OAuth complete");
expect(runtime.error).not.toHaveBeenCalled();
});
it("reports oauth errors and rethrows", async () => {
mocks.createVpsAwareOAuthHandlers.mockReturnValue({
onAuth: vi.fn(),
onPrompt: vi.fn(),
});
mocks.loginOpenAICodex.mockRejectedValue(new Error("oauth failed"));
const { prompter, spin } = createPrompter();
const runtime = createRuntime();
// The helper must propagate the failure so callers can decide how to react
// (onboarding keeps the wizard alive; the CLI command surfaces the error).
await expect(
loginOpenAICodexOAuth({
prompter,
runtime,
isRemote: true,
openUrl: async () => {},
}),
).rejects.toThrow("oauth failed");
// Before rethrowing it stops the spinner, logs the error, and shows help.
expect(spin.stop).toHaveBeenCalledWith("OpenAI OAuth failed");
expect(runtime.error).toHaveBeenCalledWith(expect.stringContaining("oauth failed"));
expect(prompter.note).toHaveBeenCalledWith(
"Trouble with OAuth? See https://docs.openclaw.ai/start/faq",
"OAuth help",
);
});
});

View File

@@ -0,0 +1,55 @@
import type { OAuthCredentials } from "@mariozechner/pi-ai";
import { loginOpenAICodex } from "@mariozechner/pi-ai";
import type { RuntimeEnv } from "../runtime.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import { createVpsAwareOAuthHandlers } from "./oauth-flow.js";
/**
 * Runs the OpenAI Codex OAuth flow with user-facing progress shared between
 * onboarding and the `models auth login` CLI path.
 *
 * Shows environment-specific instructions (remote/VPS vs. local browser),
 * drives the upstream `loginOpenAICodex` call through VPS-aware handlers, and
 * keeps a progress spinner updated throughout.
 *
 * @param params.prompter - wizard prompter used for notes and the spinner.
 * @param params.runtime - runtime used to log the error on failure.
 * @param params.isRemote - whether to show remote/VPS paste-the-URL guidance.
 * @param params.openUrl - opens the auth URL (locally) in a browser.
 * @param params.localBrowserMessage - optional spinner text while waiting for
 *   the local browser sign-in to complete.
 * @returns the obtained credentials, or `null` if the flow yielded none.
 * @throws rethrows any OAuth error after stopping the spinner, logging it,
 *   and pointing the user at the FAQ — callers decide whether to continue.
 */
export async function loginOpenAICodexOAuth(params: {
prompter: WizardPrompter;
runtime: RuntimeEnv;
isRemote: boolean;
openUrl: (url: string) => Promise<void>;
localBrowserMessage?: string;
}): Promise<OAuthCredentials | null> {
const { prompter, runtime, isRemote, openUrl, localBrowserMessage } = params;
// Instructions differ: remote users must relay the redirect URL manually,
// local users get an automatic browser + localhost callback.
const remoteLines = [
"You are running in a remote/VPS environment.",
"A URL will be shown for you to open in your LOCAL browser.",
"After signing in, paste the redirect URL back here.",
];
const localLines = [
"Browser will open for OpenAI authentication.",
"If the callback doesn't auto-complete, paste the redirect URL.",
"OpenAI OAuth uses localhost:1455 for the callback.",
];
await prompter.note((isRemote ? remoteLines : localLines).join("\n"), "OpenAI Codex OAuth");
const spinner = prompter.progress("Starting OAuth flow…");
try {
const handlers = createVpsAwareOAuthHandlers({
isRemote,
prompter,
runtime,
spin: spinner,
openUrl,
localBrowserMessage: localBrowserMessage ?? "Complete sign-in in browser…",
});
const credentials = await loginOpenAICodex({
onAuth: handlers.onAuth,
onPrompt: handlers.onPrompt,
onProgress: (msg) => spinner.update(msg),
});
spinner.stop("OpenAI OAuth complete");
// Upstream may resolve with undefined; normalize to null for callers.
return credentials ?? null;
} catch (err) {
spinner.stop("OpenAI OAuth failed");
runtime.error(String(err));
await prompter.note("Trouble with OAuth? See https://docs.openclaw.ai/start/faq", "OAuth help");
// Propagate so each caller chooses its own recovery (wizard stays alive,
// CLI command fails loudly).
throw err;
}
}