mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-07 18:41:24 +00:00
refactor(auth-profiles): unify coercion and add rejected-entry diagnostics
This commit is contained in:
@@ -1,9 +1,9 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import { ensureAuthProfileStore } from "./auth-profiles.js";
|
||||
import { AUTH_STORE_VERSION } from "./auth-profiles/constants.js";
|
||||
import { AUTH_STORE_VERSION, log } from "./auth-profiles/constants.js";
|
||||
|
||||
describe("ensureAuthProfileStore", () => {
|
||||
it("migrates legacy auth.json and deletes it (PR #368)", () => {
|
||||
@@ -123,35 +123,155 @@ describe("ensureAuthProfileStore", () => {
|
||||
}
|
||||
});
|
||||
|
||||
it("accepts mode/apiKey aliases so users who follow openclaw.json format are not silently broken", () => {
|
||||
// A common mistake: users write auth-profiles.json using the same field names
|
||||
// as openclaw.json auth.profiles ("mode" + "apiKey") instead of the canonical
|
||||
// auth-profiles.json fields ("type" + "key"). The parser now normalises both.
|
||||
const agentDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-auth-alias-"));
|
||||
try {
|
||||
const storeWithAliases = {
|
||||
version: AUTH_STORE_VERSION,
|
||||
profiles: {
|
||||
"anthropic:work": {
|
||||
provider: "anthropic",
|
||||
mode: "api_key", // alias for "type"
|
||||
apiKey: "sk-ant-alias-test", // alias for "key"
|
||||
},
|
||||
it("normalizes auth-profiles credential aliases with canonical-field precedence", () => {
|
||||
const cases = [
|
||||
{
|
||||
name: "mode/apiKey aliases map to type/key",
|
||||
profile: {
|
||||
provider: "anthropic",
|
||||
mode: "api_key",
|
||||
apiKey: "sk-ant-alias",
|
||||
},
|
||||
};
|
||||
expected: {
|
||||
type: "api_key",
|
||||
key: "sk-ant-alias",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "canonical type overrides conflicting mode alias",
|
||||
profile: {
|
||||
provider: "anthropic",
|
||||
type: "api_key",
|
||||
mode: "token",
|
||||
key: "sk-ant-canonical",
|
||||
},
|
||||
expected: {
|
||||
type: "api_key",
|
||||
key: "sk-ant-canonical",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "canonical key overrides conflicting apiKey alias",
|
||||
profile: {
|
||||
provider: "anthropic",
|
||||
type: "api_key",
|
||||
key: "sk-ant-canonical",
|
||||
apiKey: "sk-ant-alias",
|
||||
},
|
||||
expected: {
|
||||
type: "api_key",
|
||||
key: "sk-ant-canonical",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "canonical profile shape remains unchanged",
|
||||
profile: {
|
||||
provider: "anthropic",
|
||||
type: "api_key",
|
||||
key: "sk-ant-direct",
|
||||
},
|
||||
expected: {
|
||||
type: "api_key",
|
||||
key: "sk-ant-direct",
|
||||
},
|
||||
},
|
||||
] as const;
|
||||
|
||||
for (const testCase of cases) {
|
||||
const agentDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-auth-alias-"));
|
||||
try {
|
||||
const storeData = {
|
||||
version: AUTH_STORE_VERSION,
|
||||
profiles: {
|
||||
"anthropic:work": testCase.profile,
|
||||
},
|
||||
};
|
||||
fs.writeFileSync(
|
||||
path.join(agentDir, "auth-profiles.json"),
|
||||
`${JSON.stringify(storeData, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const store = ensureAuthProfileStore(agentDir);
|
||||
expect(store.profiles["anthropic:work"], testCase.name).toMatchObject(testCase.expected);
|
||||
} finally {
|
||||
fs.rmSync(agentDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it("normalizes mode/apiKey aliases while migrating legacy auth.json", () => {
|
||||
const agentDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-auth-legacy-alias-"));
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
path.join(agentDir, "auth-profiles.json"),
|
||||
`${JSON.stringify(storeWithAliases, null, 2)}\n`,
|
||||
path.join(agentDir, "auth.json"),
|
||||
`${JSON.stringify(
|
||||
{
|
||||
anthropic: {
|
||||
provider: "anthropic",
|
||||
mode: "api_key",
|
||||
apiKey: "sk-ant-legacy",
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const store = ensureAuthProfileStore(agentDir);
|
||||
const profile = store.profiles["anthropic:work"];
|
||||
expect(profile).toBeDefined();
|
||||
expect(profile?.type).toBe("api_key");
|
||||
expect((profile as { key?: string }).key).toBe("sk-ant-alias-test");
|
||||
expect(store.profiles["anthropic:default"]).toMatchObject({
|
||||
type: "api_key",
|
||||
provider: "anthropic",
|
||||
key: "sk-ant-legacy",
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(agentDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("logs one warning with aggregated reasons for rejected auth-profiles entries", () => {
|
||||
const agentDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-auth-invalid-"));
|
||||
const warnSpy = vi.spyOn(log, "warn").mockImplementation(() => undefined);
|
||||
try {
|
||||
const invalidStore = {
|
||||
version: AUTH_STORE_VERSION,
|
||||
profiles: {
|
||||
"anthropic:missing-type": {
|
||||
provider: "anthropic",
|
||||
},
|
||||
"openai:missing-provider": {
|
||||
type: "api_key",
|
||||
key: "sk-openai",
|
||||
},
|
||||
"qwen:not-object": "broken",
|
||||
},
|
||||
};
|
||||
fs.writeFileSync(
|
||||
path.join(agentDir, "auth-profiles.json"),
|
||||
`${JSON.stringify(invalidStore, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
const store = ensureAuthProfileStore(agentDir);
|
||||
expect(store.profiles).toEqual({});
|
||||
expect(warnSpy).toHaveBeenCalledTimes(1);
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
"ignored invalid auth profile entries during store load",
|
||||
{
|
||||
source: "auth-profiles.json",
|
||||
dropped: 3,
|
||||
reasons: {
|
||||
invalid_type: 1,
|
||||
missing_provider: 1,
|
||||
non_object: 1,
|
||||
},
|
||||
keys: ["anthropic:missing-type", "openai:missing-provider", "qwen:not-object"],
|
||||
},
|
||||
);
|
||||
} finally {
|
||||
warnSpy.mockRestore();
|
||||
fs.rmSync(agentDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,14 +9,10 @@ import { ensureAuthStoreFile, resolveAuthStorePath, resolveLegacyAuthStorePath }
|
||||
import type { AuthProfileCredential, AuthProfileStore, ProfileUsageStats } from "./types.js";
|
||||
|
||||
// Shape of the pre-migration auth.json store: entries keyed by name; the key
// doubles as the provider fallback when an entry omits "provider".
type LegacyAuthStore = Record<string, AuthProfileCredential>;
// Why a profile entry was dropped during store coercion; tallied for the warning log.
type CredentialRejectReason = "non_object" | "invalid_type" | "missing_provider";
// A dropped profile entry: the store key it appeared under plus the rejection reason.
type RejectedCredentialEntry = { key: string; reason: CredentialRejectReason };
|
||||
|
||||
function _syncAuthProfileStore(target: AuthProfileStore, source: AuthProfileStore): void {
|
||||
target.version = source.version;
|
||||
target.profiles = source.profiles;
|
||||
target.order = source.order;
|
||||
target.lastGood = source.lastGood;
|
||||
target.usageStats = source.usageStats;
|
||||
}
|
||||
// The credential "type" discriminator values accepted when validating profile entries.
const AUTH_PROFILE_TYPES = new Set<AuthProfileCredential["type"]>(["api_key", "oauth", "token"]);
|
||||
|
||||
export async function updateAuthProfileStoreWithLock(params: {
|
||||
agentDir?: string;
|
||||
@@ -61,6 +57,49 @@ function normalizeRawCredentialEntry(raw: Record<string, unknown>): Partial<Auth
|
||||
return entry as Partial<AuthProfileCredential>;
|
||||
}
|
||||
|
||||
function parseCredentialEntry(
|
||||
raw: unknown,
|
||||
fallbackProvider?: string,
|
||||
): { ok: true; credential: AuthProfileCredential } | { ok: false; reason: CredentialRejectReason } {
|
||||
if (!raw || typeof raw !== "object") {
|
||||
return { ok: false, reason: "non_object" };
|
||||
}
|
||||
const typed = normalizeRawCredentialEntry(raw as Record<string, unknown>);
|
||||
if (!AUTH_PROFILE_TYPES.has(typed.type as AuthProfileCredential["type"])) {
|
||||
return { ok: false, reason: "invalid_type" };
|
||||
}
|
||||
const provider = typed.provider ?? fallbackProvider;
|
||||
if (typeof provider !== "string" || provider.trim().length === 0) {
|
||||
return { ok: false, reason: "missing_provider" };
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
credential: {
|
||||
...typed,
|
||||
provider,
|
||||
} as AuthProfileCredential,
|
||||
};
|
||||
}
|
||||
|
||||
function warnRejectedCredentialEntries(source: string, rejected: RejectedCredentialEntry[]): void {
|
||||
if (rejected.length === 0) {
|
||||
return;
|
||||
}
|
||||
const reasons = rejected.reduce(
|
||||
(acc, current) => {
|
||||
acc[current.reason] = (acc[current.reason] ?? 0) + 1;
|
||||
return acc;
|
||||
},
|
||||
{} as Partial<Record<CredentialRejectReason, number>>,
|
||||
);
|
||||
log.warn("ignored invalid auth profile entries during store load", {
|
||||
source,
|
||||
dropped: rejected.length,
|
||||
reasons,
|
||||
keys: rejected.slice(0, 10).map((entry) => entry.key),
|
||||
});
|
||||
}
|
||||
|
||||
function coerceLegacyStore(raw: unknown): LegacyAuthStore | null {
|
||||
if (!raw || typeof raw !== "object") {
|
||||
return null;
|
||||
@@ -70,19 +109,16 @@ function coerceLegacyStore(raw: unknown): LegacyAuthStore | null {
|
||||
return null;
|
||||
}
|
||||
const entries: LegacyAuthStore = {};
|
||||
const rejected: RejectedCredentialEntry[] = [];
|
||||
for (const [key, value] of Object.entries(record)) {
|
||||
if (!value || typeof value !== "object") {
|
||||
const parsed = parseCredentialEntry(value, key);
|
||||
if (!parsed.ok) {
|
||||
rejected.push({ key, reason: parsed.reason });
|
||||
continue;
|
||||
}
|
||||
const typed = normalizeRawCredentialEntry(value as Record<string, unknown>);
|
||||
if (typed.type !== "api_key" && typed.type !== "oauth" && typed.type !== "token") {
|
||||
continue;
|
||||
}
|
||||
entries[key] = {
|
||||
...typed,
|
||||
provider: String(typed.provider ?? key),
|
||||
} as AuthProfileCredential;
|
||||
entries[key] = parsed.credential;
|
||||
}
|
||||
warnRejectedCredentialEntries("auth.json", rejected);
|
||||
return Object.keys(entries).length > 0 ? entries : null;
|
||||
}
|
||||
|
||||
@@ -96,19 +132,16 @@ function coerceAuthStore(raw: unknown): AuthProfileStore | null {
|
||||
}
|
||||
const profiles = record.profiles as Record<string, unknown>;
|
||||
const normalized: Record<string, AuthProfileCredential> = {};
|
||||
const rejected: RejectedCredentialEntry[] = [];
|
||||
for (const [key, value] of Object.entries(profiles)) {
|
||||
if (!value || typeof value !== "object") {
|
||||
const parsed = parseCredentialEntry(value);
|
||||
if (!parsed.ok) {
|
||||
rejected.push({ key, reason: parsed.reason });
|
||||
continue;
|
||||
}
|
||||
const typed = normalizeRawCredentialEntry(value as Record<string, unknown>);
|
||||
if (typed.type !== "api_key" && typed.type !== "oauth" && typed.type !== "token") {
|
||||
continue;
|
||||
}
|
||||
if (!typed.provider) {
|
||||
continue;
|
||||
}
|
||||
normalized[key] = typed as AuthProfileCredential;
|
||||
normalized[key] = parsed.credential;
|
||||
}
|
||||
warnRejectedCredentialEntries("auth-profiles.json", rejected);
|
||||
const order =
|
||||
record.order && typeof record.order === "object"
|
||||
? Object.entries(record.order as Record<string, unknown>).reduce(
|
||||
@@ -242,19 +275,26 @@ function applyLegacyStore(store: AuthProfileStore, legacy: LegacyAuthStore): voi
|
||||
}
|
||||
}
|
||||
|
||||
function loadCoercedStoreWithExternalSync(authPath: string): AuthProfileStore | null {
|
||||
const raw = loadJsonFile(authPath);
|
||||
const store = coerceAuthStore(raw);
|
||||
if (!store) {
|
||||
return null;
|
||||
}
|
||||
// Sync from external CLI tools on every load.
|
||||
const synced = syncExternalCliCredentials(store);
|
||||
if (synced) {
|
||||
saveJsonFile(authPath, store);
|
||||
}
|
||||
return store;
|
||||
}
|
||||
|
||||
export function loadAuthProfileStore(): AuthProfileStore {
|
||||
const authPath = resolveAuthStorePath();
|
||||
const raw = loadJsonFile(authPath);
|
||||
const asStore = coerceAuthStore(raw);
|
||||
const asStore = loadCoercedStoreWithExternalSync(authPath);
|
||||
if (asStore) {
|
||||
// Sync from external CLI tools on every load
|
||||
const synced = syncExternalCliCredentials(asStore);
|
||||
if (synced) {
|
||||
saveJsonFile(authPath, asStore);
|
||||
}
|
||||
return asStore;
|
||||
}
|
||||
|
||||
const legacyRaw = loadJsonFile(resolveLegacyAuthStorePath());
|
||||
const legacy = coerceLegacyStore(legacyRaw);
|
||||
if (legacy) {
|
||||
@@ -277,14 +317,8 @@ function loadAuthProfileStoreForAgent(
|
||||
_options?: { allowKeychainPrompt?: boolean },
|
||||
): AuthProfileStore {
|
||||
const authPath = resolveAuthStorePath(agentDir);
|
||||
const raw = loadJsonFile(authPath);
|
||||
const asStore = coerceAuthStore(raw);
|
||||
const asStore = loadCoercedStoreWithExternalSync(authPath);
|
||||
if (asStore) {
|
||||
// Sync from external CLI tools on every load
|
||||
const synced = syncExternalCliCredentials(asStore);
|
||||
if (synced) {
|
||||
saveJsonFile(authPath, asStore);
|
||||
}
|
||||
return asStore;
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user