mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-09 11:07:41 +00:00
fix(auth): harden openai-codex oauth login path
This commit is contained in:
182
src/commands/models/auth.test.ts
Normal file
182
src/commands/models/auth.test.ts
Normal file
@@ -0,0 +1,182 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import type { RuntimeEnv } from "../../runtime.js";
|
||||
|
||||
// Hoisted mock registry: vi.hoisted guarantees these vi.fn() instances exist
// before the vi.mock factories below run (vi.mock calls are hoisted to the
// top of the module by vitest).
const mocks = vi.hoisted(() => ({
  resolveDefaultAgentId: vi.fn(),
  resolveAgentDir: vi.fn(),
  resolveAgentWorkspaceDir: vi.fn(),
  resolveDefaultAgentWorkspaceDir: vi.fn(),
  resolvePluginProviders: vi.fn(),
  createClackPrompter: vi.fn(),
  loginOpenAICodexOAuth: vi.fn(),
  writeOAuthCredentials: vi.fn(),
  loadValidConfigOrThrow: vi.fn(),
  updateConfig: vi.fn(),
  logConfigUpdated: vi.fn(),
  openUrl: vi.fn(),
}));

// Agent scoping: resolve agent id and directories without touching disk.
vi.mock("../../agents/agent-scope.js", () => ({
  resolveDefaultAgentId: mocks.resolveDefaultAgentId,
  resolveAgentDir: mocks.resolveAgentDir,
  resolveAgentWorkspaceDir: mocks.resolveAgentWorkspaceDir,
}));

vi.mock("../../agents/workspace.js", () => ({
  resolveDefaultAgentWorkspaceDir: mocks.resolveDefaultAgentWorkspaceDir,
}));

vi.mock("../../plugins/providers.js", () => ({
  resolvePluginProviders: mocks.resolvePluginProviders,
}));

vi.mock("../../wizard/clack-prompter.js", () => ({
  createClackPrompter: mocks.createClackPrompter,
}));

// The OAuth flow itself is fully mocked; tests assert on how it is invoked.
vi.mock("../openai-codex-oauth.js", () => ({
  loginOpenAICodexOAuth: mocks.loginOpenAICodexOAuth,
}));

// Partial mock: keep the real module but intercept credential persistence.
vi.mock("../onboard-auth.js", async (importActual) => {
  const actual = await importActual<typeof import("../onboard-auth.js")>();
  return {
    ...actual,
    writeOAuthCredentials: mocks.writeOAuthCredentials,
  };
});

// Partial mock: keep the real shared helpers but control config load/update.
vi.mock("./shared.js", async (importActual) => {
  const actual = await importActual<typeof import("./shared.js")>();
  return {
    ...actual,
    loadValidConfigOrThrow: mocks.loadValidConfigOrThrow,
    updateConfig: mocks.updateConfig,
  };
});

vi.mock("../../config/logging.js", () => ({
  logConfigUpdated: mocks.logConfigUpdated,
}));

vi.mock("../onboard-helpers.js", () => ({
  openUrl: mocks.openUrl,
}));

// Import the system under test AFTER all vi.mock registrations so the mocked
// modules are the ones resolved by auth.js.
const { modelsAuthLoginCommand } = await import("./auth.js");
|
||||
|
||||
function createRuntime(): RuntimeEnv {
|
||||
return {
|
||||
log: vi.fn(),
|
||||
error: vi.fn(),
|
||||
exit: vi.fn(),
|
||||
};
|
||||
}
|
||||
|
||||
function withInteractiveStdin() {
|
||||
const stdin = process.stdin as NodeJS.ReadStream & { isTTY?: boolean };
|
||||
const hadOwnIsTTY = Object.prototype.hasOwnProperty.call(stdin, "isTTY");
|
||||
const previousIsTTYDescriptor = Object.getOwnPropertyDescriptor(stdin, "isTTY");
|
||||
Object.defineProperty(stdin, "isTTY", {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: () => true,
|
||||
});
|
||||
return () => {
|
||||
if (previousIsTTYDescriptor) {
|
||||
Object.defineProperty(stdin, "isTTY", previousIsTTYDescriptor);
|
||||
} else if (!hadOwnIsTTY) {
|
||||
delete (stdin as { isTTY?: boolean }).isTTY;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
describe("modelsAuthLoginCommand", () => {
  // Restores the stdin isTTY patch installed per test.
  let restoreStdin: (() => void) | null = null;
  // In-memory config the mocked loadValidConfigOrThrow/updateConfig operate on.
  let currentConfig: OpenClawConfig;
  // Snapshot of the config produced by the most recent updateConfig mutator.
  let lastUpdatedConfig: OpenClawConfig | null;

  beforeEach(() => {
    vi.clearAllMocks();
    restoreStdin = withInteractiveStdin();
    currentConfig = {};
    lastUpdatedConfig = null;

    // Agent/workspace resolution is pinned to fixed paths.
    mocks.resolveDefaultAgentId.mockReturnValue("main");
    mocks.resolveAgentDir.mockReturnValue("/tmp/openclaw/agents/main");
    mocks.resolveAgentWorkspaceDir.mockReturnValue("/tmp/openclaw/workspace");
    mocks.resolveDefaultAgentWorkspaceDir.mockReturnValue("/tmp/openclaw/workspace");
    mocks.loadValidConfigOrThrow.mockImplementation(async () => currentConfig);
    // updateConfig applies the mutator to the in-memory config and records
    // the result so tests can assert on the final config shape.
    mocks.updateConfig.mockImplementation(
      async (mutator: (cfg: OpenClawConfig) => OpenClawConfig) => {
        lastUpdatedConfig = mutator(currentConfig);
        currentConfig = lastUpdatedConfig;
        return lastUpdatedConfig;
      },
    );
    mocks.createClackPrompter.mockReturnValue({
      note: vi.fn(async () => {}),
      select: vi.fn(),
    });
    // Happy-path OAuth result for the built-in openai-codex flow.
    mocks.loginOpenAICodexOAuth.mockResolvedValue({
      type: "oauth",
      provider: "openai-codex",
      access: "access-token",
      refresh: "refresh-token",
      expires: Date.now() + 60_000,
      email: "user@example.com",
    });
    mocks.writeOAuthCredentials.mockResolvedValue("openai-codex:user@example.com");
    // No provider plugins installed — the built-in path must not need them.
    mocks.resolvePluginProviders.mockReturnValue([]);
  });

  afterEach(() => {
    restoreStdin?.();
    restoreStdin = null;
  });

  it("supports built-in openai-codex login without provider plugins", async () => {
    const runtime = createRuntime();

    await modelsAuthLoginCommand({ provider: "openai-codex" }, runtime);

    expect(mocks.loginOpenAICodexOAuth).toHaveBeenCalledOnce();
    // Credentials are written to the agent dir and synced to sibling agents.
    expect(mocks.writeOAuthCredentials).toHaveBeenCalledWith(
      "openai-codex",
      expect.any(Object),
      "/tmp/openclaw/agents/main",
      { syncSiblingAgents: true },
    );
    // The built-in path must short-circuit before plugin resolution.
    expect(mocks.resolvePluginProviders).not.toHaveBeenCalled();
    expect(lastUpdatedConfig?.auth?.profiles?.["openai-codex:user@example.com"]).toMatchObject({
      provider: "openai-codex",
      mode: "oauth",
    });
    expect(runtime.log).toHaveBeenCalledWith(
      "Auth profile: openai-codex:user@example.com (openai-codex/oauth)",
    );
    // Without --set-default the default model is only advertised, not applied.
    expect(runtime.log).toHaveBeenCalledWith(
      "Default model available: openai-codex/gpt-5.3-codex (use --set-default to apply)",
    );
  });

  it("applies openai-codex default model when --set-default is used", async () => {
    const runtime = createRuntime();

    await modelsAuthLoginCommand({ provider: "openai-codex", setDefault: true }, runtime);

    expect(lastUpdatedConfig?.agents?.defaults?.model).toEqual({
      primary: "openai-codex/gpt-5.3-codex",
    });
    expect(runtime.log).toHaveBeenCalledWith("Default model set to openai-codex/gpt-5.3-codex");
  });

  it("keeps existing plugin error behavior for non built-in providers", async () => {
    const runtime = createRuntime();

    // Non built-in providers still require plugins; with none installed the
    // command must keep throwing the original error.
    await expect(modelsAuthLoginCommand({ provider: "anthropic" }, runtime)).rejects.toThrow(
      "No provider plugins found.",
    );
  });
});
|
||||
@@ -19,8 +19,13 @@ import { createClackPrompter } from "../../wizard/clack-prompter.js";
|
||||
import { validateAnthropicSetupToken } from "../auth-token.js";
|
||||
import { isRemoteEnvironment } from "../oauth-env.js";
|
||||
import { createVpsAwareOAuthHandlers } from "../oauth-flow.js";
|
||||
import { applyAuthProfileConfig } from "../onboard-auth.js";
|
||||
import { applyAuthProfileConfig, writeOAuthCredentials } from "../onboard-auth.js";
|
||||
import { openUrl } from "../onboard-helpers.js";
|
||||
import {
|
||||
applyOpenAICodexModelDefault,
|
||||
OPENAI_CODEX_DEFAULT_MODEL,
|
||||
} from "../openai-codex-model-default.js";
|
||||
import { loginOpenAICodexOAuth } from "../openai-codex-oauth.js";
|
||||
import {
|
||||
applyDefaultModel,
|
||||
mergeConfigPatch,
|
||||
@@ -272,6 +277,51 @@ function credentialMode(credential: AuthProfileCredential): "api_key" | "oauth"
|
||||
return "oauth";
|
||||
}
|
||||
|
||||
async function runBuiltInOpenAICodexLogin(params: {
|
||||
opts: LoginOptions;
|
||||
runtime: RuntimeEnv;
|
||||
prompter: ReturnType<typeof createClackPrompter>;
|
||||
agentDir: string;
|
||||
}) {
|
||||
const creds = await loginOpenAICodexOAuth({
|
||||
prompter: params.prompter,
|
||||
runtime: params.runtime,
|
||||
isRemote: isRemoteEnvironment(),
|
||||
openUrl: async (url) => {
|
||||
await openUrl(url);
|
||||
},
|
||||
localBrowserMessage: "Complete sign-in in browser…",
|
||||
});
|
||||
if (!creds) {
|
||||
throw new Error("OpenAI Codex OAuth did not return credentials.");
|
||||
}
|
||||
|
||||
const profileId = await writeOAuthCredentials("openai-codex", creds, params.agentDir, {
|
||||
syncSiblingAgents: true,
|
||||
});
|
||||
await updateConfig((cfg) => {
|
||||
let next = applyAuthProfileConfig(cfg, {
|
||||
profileId,
|
||||
provider: "openai-codex",
|
||||
mode: "oauth",
|
||||
});
|
||||
if (params.opts.setDefault) {
|
||||
next = applyOpenAICodexModelDefault(next).next;
|
||||
}
|
||||
return next;
|
||||
});
|
||||
|
||||
logConfigUpdated(params.runtime);
|
||||
params.runtime.log(`Auth profile: ${profileId} (openai-codex/oauth)`);
|
||||
if (params.opts.setDefault) {
|
||||
params.runtime.log(`Default model set to ${OPENAI_CODEX_DEFAULT_MODEL}`);
|
||||
} else {
|
||||
params.runtime.log(
|
||||
`Default model available: ${OPENAI_CODEX_DEFAULT_MODEL} (use --set-default to apply)`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function modelsAuthLoginCommand(opts: LoginOptions, runtime: RuntimeEnv) {
|
||||
if (!process.stdin.isTTY) {
|
||||
throw new Error("models auth login requires an interactive TTY.");
|
||||
@@ -282,6 +332,18 @@ export async function modelsAuthLoginCommand(opts: LoginOptions, runtime: Runtim
|
||||
const agentDir = resolveAgentDir(config, defaultAgentId);
|
||||
const workspaceDir =
|
||||
resolveAgentWorkspaceDir(config, defaultAgentId) ?? resolveDefaultAgentWorkspaceDir();
|
||||
const requestedProviderId = normalizeProviderId(String(opts.provider ?? ""));
|
||||
const prompter = createClackPrompter();
|
||||
|
||||
if (requestedProviderId === "openai-codex") {
|
||||
await runBuiltInOpenAICodexLogin({
|
||||
opts,
|
||||
runtime,
|
||||
prompter,
|
||||
agentDir,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const providers = resolvePluginProviders({ config, workspaceDir });
|
||||
if (providers.length === 0) {
|
||||
@@ -290,7 +352,6 @@ export async function modelsAuthLoginCommand(opts: LoginOptions, runtime: Runtim
|
||||
);
|
||||
}
|
||||
|
||||
const prompter = createClackPrompter();
|
||||
const requestedProvider = resolveRequestedLoginProviderOrThrow(providers, opts.provider);
|
||||
const selectedProvider =
|
||||
requestedProvider ??
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import type { RuntimeEnv } from "../runtime.js";
|
||||
import type { WizardPrompter } from "../wizard/prompts.js";
|
||||
|
||||
@@ -56,10 +56,30 @@ async function runCodexOAuth(params: { isRemote: boolean }) {
|
||||
}
|
||||
|
||||
describe("loginOpenAICodexOAuth", () => {
|
||||
let restoreFetch: (() => void) | null = null;
|
||||
|
||||
// Shared setup: reset mocks, let the TLS preflight succeed by default, and
// stub global fetch so the post-login scope probe receives a deterministic
// 400 ("model is required") response — a status that must not be treated as
// a missing-scope failure.
beforeEach(() => {
  vi.clearAllMocks();
  mocks.runOpenAIOAuthTlsPreflight.mockResolvedValue({ ok: true });
  mocks.formatOpenAIOAuthTlsPreflightFix.mockReturnValue("tls fix");

  const originalFetch = globalThis.fetch;
  const fetchMock = vi.fn(
    async () =>
      new Response('{"error":{"message":"model is required"}}', {
        status: 400,
        headers: { "content-type": "application/json" },
      }),
  );
  globalThis.fetch = fetchMock as unknown as typeof fetch;
  // Capture a restore closure so afterEach can undo the global fetch stub
  // even when an individual test replaced fetch again.
  restoreFetch = () => {
    globalThis.fetch = originalFetch;
  };
});

afterEach(() => {
  restoreFetch?.();
  restoreFetch = null;
});
|
||||
|
||||
it("returns credentials on successful oauth login", async () => {
|
||||
@@ -136,6 +156,53 @@ describe("loginOpenAICodexOAuth", () => {
|
||||
expect(runtime.error).not.toHaveBeenCalledWith("tls fix");
|
||||
expect(prompter.note).not.toHaveBeenCalledWith("tls fix", "OAuth prerequisites");
|
||||
});
|
||||
|
||||
// A 401 from the scope probe whose body names api.responses.write must abort
// the login with an actionable error, even though the token exchange itself
// succeeded.
it("fails with actionable error when token is missing api.responses.write scope", async () => {
  mocks.createVpsAwareOAuthHandlers.mockReturnValue({
    onAuth: vi.fn(),
    onPrompt: vi.fn(),
  });
  // The underlying OAuth exchange succeeds and yields usable-looking tokens.
  mocks.loginOpenAICodex.mockResolvedValue({
    provider: "openai-codex" as const,
    access: "access-token",
    refresh: "refresh-token",
    expires: Date.now() + 60_000,
    email: "user@example.com",
  });
  // Scope probe answers 401 with a missing-scope message.
  globalThis.fetch = vi.fn(
    async () =>
      new Response('{"error":{"message":"Missing scopes: api.responses.write"}}', {
        status: 401,
        headers: { "content-type": "application/json" },
      }),
  ) as unknown as typeof fetch;

  await expect(runCodexOAuth({ isRemote: false })).rejects.toThrow(
    "missing required scope: api.responses.write",
  );
});
|
||||
|
||||
// The scope probe is best effort: when it cannot reach the network at all,
// a successful OAuth exchange must still complete and return credentials.
it("does not fail oauth completion when scope probe is unavailable", async () => {
  const creds = {
    provider: "openai-codex" as const,
    access: "access-token",
    refresh: "refresh-token",
    expires: Date.now() + 60_000,
    email: "user@example.com",
  };
  mocks.createVpsAwareOAuthHandlers.mockReturnValue({
    onAuth: vi.fn(),
    onPrompt: vi.fn(),
  });
  mocks.loginOpenAICodex.mockResolvedValue(creds);
  // Simulate total network failure for the probe request.
  globalThis.fetch = vi.fn(async () => {
    throw new Error("network down");
  }) as unknown as typeof fetch;

  const { result } = await runCodexOAuth({ isRemote: false });
  expect(result).toEqual(creds);
});
|
||||
|
||||
it("fails early with actionable message when TLS preflight fails", async () => {
|
||||
mocks.runOpenAIOAuthTlsPreflight.mockResolvedValue({
|
||||
ok: false,
|
||||
|
||||
@@ -8,6 +8,41 @@ import {
|
||||
runOpenAIOAuthTlsPreflight,
|
||||
} from "./oauth-tls-preflight.js";
|
||||
|
||||
const OPENAI_RESPONSES_ENDPOINT = "https://api.openai.com/v1/responses";
|
||||
const OPENAI_RESPONSES_WRITE_SCOPE = "api.responses.write";
|
||||
|
||||
function extractResponsesScopeErrorMessage(status: number, bodyText: string): string | null {
|
||||
if (status !== 401) {
|
||||
return null;
|
||||
}
|
||||
const normalized = bodyText.toLowerCase();
|
||||
if (
|
||||
normalized.includes("missing scope") &&
|
||||
normalized.includes(OPENAI_RESPONSES_WRITE_SCOPE.toLowerCase())
|
||||
) {
|
||||
return bodyText.trim() || `Missing scopes: ${OPENAI_RESPONSES_WRITE_SCOPE}`;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
async function detectMissingResponsesWriteScope(accessToken: string): Promise<string | null> {
|
||||
try {
|
||||
const response = await fetch(OPENAI_RESPONSES_ENDPOINT, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: "{}",
|
||||
});
|
||||
const bodyText = await response.text();
|
||||
return extractResponsesScopeErrorMessage(response.status, bodyText);
|
||||
} catch {
|
||||
// Best effort only: network/TLS issues should not block successful OAuth completion.
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export async function loginOpenAICodexOAuth(params: {
|
||||
prompter: WizardPrompter;
|
||||
runtime: RuntimeEnv;
|
||||
@@ -55,6 +90,18 @@ export async function loginOpenAICodexOAuth(params: {
|
||||
onPrompt,
|
||||
onProgress: (msg) => spin.update(msg),
|
||||
});
|
||||
if (creds?.access) {
|
||||
const scopeError = await detectMissingResponsesWriteScope(creds.access);
|
||||
if (scopeError) {
|
||||
throw new Error(
|
||||
[
|
||||
`OpenAI OAuth token is missing required scope: ${OPENAI_RESPONSES_WRITE_SCOPE}.`,
|
||||
`Provider response: ${scopeError}`,
|
||||
"Re-authenticate with OpenAI Codex OAuth or use OPENAI_API_KEY with openai/* models.",
|
||||
].join(" "),
|
||||
);
|
||||
}
|
||||
}
|
||||
spin.stop("OpenAI OAuth complete");
|
||||
return creds ?? null;
|
||||
} catch (err) {
|
||||
|
||||
Reference in New Issue
Block a user