Feat/litellm provider (#12823)

* feat: add LiteLLM provider types, env var, credentials, and auth choice

Add litellm-api-key auth choice, LITELLM_API_KEY env var mapping,
setLitellmApiKey() credential storage, and LITELLM_DEFAULT_MODEL_REF.

* feat: add LiteLLM onboarding handler and provider config

Add applyLitellmProviderConfig which properly registers
models.providers.litellm with baseUrl, api type, and model definitions.
This fixes the critical bug from PR #6488 where the provider entry was
never created, causing model resolution to fail at runtime.

* docs: add LiteLLM provider documentation

Add setup guide covering onboarding, manual config, virtual keys,
model routing, and usage tracking. Link from provider index.

* docs: add LiteLLM to sidebar navigation in docs.json

Add providers/litellm to both English and Chinese provider page lists
so the docs page appears in the sidebar navigation.

* test: add LiteLLM non-interactive onboarding test

Wire up litellmApiKey flag inference and auth-choice handler for the
non-interactive onboarding path, and add an integration test covering
profile, model default, and credential storage.

* fix: register --litellm-api-key CLI flag and add preferred provider mapping

Wire up the missing Commander CLI option, action handler mapping, and
help text for --litellm-api-key. Add litellm-api-key to the preferred
provider map for consistency with other providers.

* fix: remove zh-CN sidebar entry for litellm (no localized page yet)

* style: format buildLitellmModelDefinition return type

* fix(onboarding): harden LiteLLM provider setup (#12823)

* refactor(onboarding): keep auth-choice provider dispatcher under size limit

---------

Co-authored-by: Peter Steinberger <steipete@gmail.com>
This commit is contained in:
ryan-crabbe
2026-02-11 02:46:56 -08:00
committed by GitHub
parent 5741b6cb3f
commit a36b9be245
19 changed files with 612 additions and 1 deletions

View File

@@ -13,6 +13,7 @@ export type AuthChoiceGroupId =
| "google"
| "copilot"
| "openrouter"
| "litellm"
| "ai-gateway"
| "cloudflare-ai-gateway"
| "moonshot"
@@ -143,6 +144,12 @@ const AUTH_CHOICE_GROUP_DEFS: {
hint: "Privacy-focused (uncensored models)",
choices: ["venice-api-key"],
},
{
value: "litellm",
label: "LiteLLM",
hint: "Unified LLM gateway (100+ providers)",
choices: ["litellm-api-key"],
},
{
value: "cloudflare-ai-gateway",
label: "Cloudflare AI Gateway",
@@ -182,6 +189,11 @@ export function buildAuthChoiceOptions(params: {
label: "Qianfan API key",
});
options.push({ value: "openrouter-api-key", label: "OpenRouter API key" });
options.push({
value: "litellm-api-key",
label: "LiteLLM API key",
hint: "Unified gateway for 100+ LLM providers",
});
options.push({
value: "ai-gateway-api-key",
label: "Vercel AI Gateway API key",

View File

@@ -19,6 +19,8 @@ import {
applyQianfanProviderConfig,
applyKimiCodeConfig,
applyKimiCodeProviderConfig,
applyLitellmConfig,
applyLitellmProviderConfig,
applyMoonshotConfig,
applyMoonshotConfigCn,
applyMoonshotProviderConfig,
@@ -39,6 +41,7 @@ import {
applyXiaomiProviderConfig,
applyZaiConfig,
CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF,
LITELLM_DEFAULT_MODEL_REF,
QIANFAN_DEFAULT_MODEL_REF,
KIMI_CODING_MODEL_REF,
MOONSHOT_DEFAULT_MODEL_REF,
@@ -51,6 +54,7 @@ import {
setCloudflareAiGatewayConfig,
setQianfanApiKey,
setGeminiApiKey,
setLitellmApiKey,
setKimiCodingApiKey,
setMoonshotApiKey,
setOpencodeZenApiKey,
@@ -89,6 +93,8 @@ export async function applyAuthChoiceApiProviders(
) {
if (params.opts.tokenProvider === "openrouter") {
authChoice = "openrouter-api-key";
} else if (params.opts.tokenProvider === "litellm") {
authChoice = "litellm-api-key";
} else if (params.opts.tokenProvider === "vercel-ai-gateway") {
authChoice = "ai-gateway-api-key";
} else if (params.opts.tokenProvider === "cloudflare-ai-gateway") {
@@ -197,6 +203,69 @@ export async function applyAuthChoiceApiProviders(
return { config: nextConfig, agentModelOverride };
}
if (authChoice === "litellm-api-key") {
const store = ensureAuthProfileStore(params.agentDir, { allowKeychainPrompt: false });
const profileOrder = resolveAuthProfileOrder({ cfg: nextConfig, store, provider: "litellm" });
const existingProfileId = profileOrder.find((profileId) => Boolean(store.profiles[profileId]));
const existingCred = existingProfileId ? store.profiles[existingProfileId] : undefined;
let profileId = "litellm:default";
let hasCredential = false;
if (existingProfileId && existingCred?.type === "api_key") {
profileId = existingProfileId;
hasCredential = true;
}
if (!hasCredential && params.opts?.token && params.opts?.tokenProvider === "litellm") {
await setLitellmApiKey(normalizeApiKeyInput(params.opts.token), params.agentDir);
hasCredential = true;
}
if (!hasCredential) {
await params.prompter.note(
"LiteLLM provides a unified API to 100+ LLM providers.\nGet your API key from your LiteLLM proxy or https://litellm.ai\nDefault proxy runs on http://localhost:4000",
"LiteLLM",
);
const envKey = resolveEnvApiKey("litellm");
if (envKey) {
const useExisting = await params.prompter.confirm({
message: `Use existing LITELLM_API_KEY (${envKey.source}, ${formatApiKeyPreview(envKey.apiKey)})?`,
initialValue: true,
});
if (useExisting) {
await setLitellmApiKey(envKey.apiKey, params.agentDir);
hasCredential = true;
}
}
if (!hasCredential) {
const key = await params.prompter.text({
message: "Enter LiteLLM API key",
validate: validateApiKeyInput,
});
await setLitellmApiKey(normalizeApiKeyInput(String(key)), params.agentDir);
hasCredential = true;
}
}
if (hasCredential) {
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId,
provider: "litellm",
mode: "api_key",
});
}
const applied = await applyDefaultModelChoice({
config: nextConfig,
setDefaultModel: params.setDefaultModel,
defaultModel: LITELLM_DEFAULT_MODEL_REF,
applyDefaultConfig: applyLitellmConfig,
applyProviderConfig: applyLitellmProviderConfig,
noteDefault: LITELLM_DEFAULT_MODEL_REF,
noteAgentModel,
prompter: params.prompter,
});
nextConfig = applied.config;
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
return { config: nextConfig, agentModelOverride };
}
if (authChoice === "ai-gateway-api-key") {
let hasCredential = false;

View File

@@ -32,6 +32,7 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
minimax: "lmstudio",
"opencode-zen": "opencode",
"xai-api-key": "xai",
"litellm-api-key": "litellm",
"qwen-portal": "qwen-portal",
"minimax-portal": "minimax-portal",
"qianfan-api-key": "qianfan",

View File

@@ -32,6 +32,7 @@ describe("applyAuthChoice", () => {
const previousAgentDir = process.env.OPENCLAW_AGENT_DIR;
const previousPiAgentDir = process.env.PI_CODING_AGENT_DIR;
const previousOpenrouterKey = process.env.OPENROUTER_API_KEY;
const previousLitellmKey = process.env.LITELLM_API_KEY;
const previousAiGatewayKey = process.env.AI_GATEWAY_API_KEY;
const previousCloudflareGatewayKey = process.env.CLOUDFLARE_AI_GATEWAY_API_KEY;
const previousSshTty = process.env.SSH_TTY;
@@ -65,6 +66,11 @@ describe("applyAuthChoice", () => {
} else {
process.env.OPENROUTER_API_KEY = previousOpenrouterKey;
}
if (previousLitellmKey === undefined) {
delete process.env.LITELLM_API_KEY;
} else {
process.env.LITELLM_API_KEY = previousLitellmKey;
}
if (previousAiGatewayKey === undefined) {
delete process.env.AI_GATEWAY_API_KEY;
} else {
@@ -402,6 +408,96 @@ describe("applyAuthChoice", () => {
delete process.env.OPENROUTER_API_KEY;
});
// Regression test: a stale OAuth profile for litellm must not be reused when
// the user selects API-key auth; the LITELLM_API_KEY env var is offered via a
// confirm prompt, and a fresh "litellm:default" api_key profile is created
// alongside the legacy entry.
it("ignores legacy LiteLLM oauth profiles when selecting litellm-api-key", async () => {
  // Isolate all state in a temp dir so the real agent config is untouched.
  tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
  process.env.OPENCLAW_STATE_DIR = tempStateDir;
  process.env.OPENCLAW_AGENT_DIR = path.join(tempStateDir, "agent");
  process.env.PI_CODING_AGENT_DIR = process.env.OPENCLAW_AGENT_DIR;
  process.env.LITELLM_API_KEY = "sk-litellm-test";
  // Seed an auth-profile store containing only a legacy oauth credential.
  const authProfilePath = authProfilePathFor(requireAgentDir());
  await fs.mkdir(path.dirname(authProfilePath), { recursive: true });
  await fs.writeFile(
    authProfilePath,
    JSON.stringify(
      {
        version: 1,
        profiles: {
          "litellm:legacy": {
            type: "oauth",
            provider: "litellm",
            access: "access-token",
            refresh: "refresh-token",
            expires: Date.now() + 60_000,
          },
        },
      },
      null,
      2,
    ),
    "utf8",
  );
  const text = vi.fn();
  // Prompter stub: pick the first option of any select, accept every confirm.
  const select: WizardPrompter["select"] = vi.fn(
    async (params) => params.options[0]?.value as never,
  );
  const multiselect: WizardPrompter["multiselect"] = vi.fn(async () => []);
  const confirm = vi.fn(async () => true);
  const prompter: WizardPrompter = {
    intro: vi.fn(noopAsync),
    outro: vi.fn(noopAsync),
    note: vi.fn(noopAsync),
    select,
    multiselect,
    text,
    confirm,
    progress: vi.fn(() => ({ update: noop, stop: noop })),
  };
  const runtime: RuntimeEnv = {
    log: vi.fn(),
    error: vi.fn(),
    exit: vi.fn((code: number) => {
      throw new Error(`exit:${code}`);
    }),
  };
  const result = await applyAuthChoice({
    authChoice: "litellm-api-key",
    config: {
      auth: {
        profiles: {
          "litellm:legacy": { provider: "litellm", mode: "oauth" },
        },
        order: { litellm: ["litellm:legacy"] },
      },
    },
    prompter,
    runtime,
    setDefaultModel: true,
  });
  // The env-var key must be offered via confirm, never requested via text input.
  expect(confirm).toHaveBeenCalledWith(
    expect.objectContaining({
      message: expect.stringContaining("LITELLM_API_KEY"),
    }),
  );
  expect(text).not.toHaveBeenCalled();
  expect(result.config.auth?.profiles?.["litellm:default"]).toMatchObject({
    provider: "litellm",
    mode: "api_key",
  });
  // The on-disk store must hold the env key under the new profile id.
  const raw = await fs.readFile(authProfilePath, "utf8");
  const parsed = JSON.parse(raw) as {
    profiles?: Record<string, { type?: string; key?: string }>;
  };
  expect(parsed.profiles?.["litellm:default"]).toMatchObject({
    type: "api_key",
    key: "sk-litellm-test",
  });
});
it("uses existing AI_GATEWAY_API_KEY when selecting ai-gateway-api-key", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;

View File

@@ -29,6 +29,7 @@ import {
} from "../agents/venice-models.js";
import {
CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF,
LITELLM_DEFAULT_MODEL_REF,
OPENROUTER_DEFAULT_MODEL_REF,
TOGETHER_DEFAULT_MODEL_REF,
VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
@@ -252,6 +253,105 @@ export function applyOpenrouterConfig(cfg: OpenClawConfig): OpenClawConfig {
};
}
// LiteLLM proxy defaults: a locally running proxy on port 4000 exposing an
// OpenAI-compatible API surface.
export const LITELLM_BASE_URL = "http://localhost:4000";
export const LITELLM_DEFAULT_MODEL_ID = "claude-opus-4-6";

/** Shape of a single model entry registered under `models.providers.litellm`. */
type LitellmModelDefinition = {
  id: string;
  name: string;
  reasoning: boolean;
  input: Array<"text" | "image">;
  cost: { input: number; output: number; cacheRead: number; cacheWrite: number };
  contextWindow: number;
  maxTokens: number;
};

/**
 * Builds the default model definition seeded into the LiteLLM provider.
 *
 * Cost is zeroed because LiteLLM routes to many upstreams and real pricing is
 * unknown here; context window and max tokens are neutral placeholders.
 */
function buildLitellmModelDefinition(): LitellmModelDefinition {
  return {
    id: LITELLM_DEFAULT_MODEL_ID,
    name: "Claude Opus 4.6",
    reasoning: true,
    input: ["text", "image"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 128_000,
    maxTokens: 8_192,
  };
}
/**
 * Registers the `litellm` entry under `models.providers` and ensures the
 * LiteLLM default model ref carries a display alias.
 *
 * Existing user configuration is preserved: a prior `baseUrl`, `apiKey`, and
 * any extra model entries are kept. The `api` field is always normalized to
 * "openai-completions", and a blank/whitespace-only `apiKey` is dropped
 * rather than copied through.
 */
export function applyLitellmProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  // Give the default model ref an alias without clobbering a user-set one.
  const defaultModels = { ...cfg.agents?.defaults?.models };
  const existingRef = defaultModels[LITELLM_DEFAULT_MODEL_REF];
  defaultModels[LITELLM_DEFAULT_MODEL_REF] = {
    ...existingRef,
    alias: existingRef?.alias ?? "LiteLLM",
  };

  const providers = { ...cfg.models?.providers };
  const prior = providers.litellm;
  const priorModels = Array.isArray(prior?.models) ? prior.models : [];
  const seeded = buildLitellmModelDefinition();
  // Append the default model only if it is not already present.
  const modelList = priorModels.some((m) => m.id === LITELLM_DEFAULT_MODEL_ID)
    ? priorModels
    : [...priorModels, seeded];

  // Separate apiKey so its trimmed form can be conditionally re-added below.
  const { apiKey: rawApiKey, ...priorRest } = (prior ?? {}) as Record<string, unknown> as {
    apiKey?: string;
  };
  const trimmedKey = typeof rawApiKey === "string" ? rawApiKey.trim() : undefined;
  const trimmedBaseUrl = typeof prior?.baseUrl === "string" ? prior.baseUrl.trim() : "";

  providers.litellm = {
    ...priorRest,
    baseUrl: trimmedBaseUrl || LITELLM_BASE_URL,
    api: "openai-completions",
    ...(trimmedKey ? { apiKey: trimmedKey } : {}),
    models: modelList.length > 0 ? modelList : [seeded],
  };

  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models: defaultModels,
      },
    },
    models: {
      mode: cfg.models?.mode ?? "merge",
      providers,
    },
  };
}
/**
 * Applies the LiteLLM provider config and makes the LiteLLM default model the
 * primary agent model.
 *
 * An existing `fallbacks` list on the current model selection is carried over;
 * every other field of the previous selection is intentionally replaced.
 */
export function applyLitellmConfig(cfg: OpenClawConfig): OpenClawConfig {
  const withProvider = applyLitellmProviderConfig(cfg);
  const priorModel = withProvider.agents?.defaults?.model;
  const carriedFallbacks =
    priorModel && "fallbacks" in (priorModel as Record<string, unknown>)
      ? { fallbacks: (priorModel as { fallbacks?: string[] }).fallbacks }
      : undefined;
  return {
    ...withProvider,
    agents: {
      ...withProvider.agents,
      defaults: {
        ...withProvider.agents?.defaults,
        model: {
          ...carriedFallbacks,
          primary: LITELLM_DEFAULT_MODEL_REF,
        },
      },
    },
  };
}
/** Registers the Moonshot provider using the global (non-CN) API base URL. */
export function applyMoonshotProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  return applyMoonshotProviderConfigWithBaseUrl(cfg, MOONSHOT_BASE_URL);
}

View File

@@ -119,6 +119,7 @@ export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
export const XIAOMI_DEFAULT_MODEL_REF = "xiaomi/mimo-v2-flash";
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";
export const TOGETHER_DEFAULT_MODEL_REF = "together/moonshotai/Kimi-K2.5";
export const LITELLM_DEFAULT_MODEL_REF = "litellm/claude-opus-4-6";
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.6";
export async function setZaiApiKey(key: string, agentDir?: string) {
@@ -182,6 +183,18 @@ export async function setCloudflareAiGatewayConfig(
});
}
/**
 * Stores a LiteLLM API key as the "litellm:default" auth profile.
 *
 * NOTE(review): declared async for signature parity with the other
 * set*ApiKey helpers, but upsertAuthProfile is not awaited here — confirm
 * it is synchronous.
 */
export async function setLitellmApiKey(key: string, agentDir?: string) {
  upsertAuthProfile({
    profileId: "litellm:default",
    credential: {
      type: "api_key",
      provider: "litellm",
      key,
    },
    agentDir: resolveAuthAgentDir(agentDir),
  });
}
export async function setVercelAiGatewayApiKey(key: string, agentDir?: string) {
upsertAuthProfile({
profileId: "vercel-ai-gateway:default",

View File

@@ -5,6 +5,7 @@ import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import {
applyAuthProfileConfig,
applyLitellmProviderConfig,
applyMinimaxApiConfig,
applyMinimaxApiProviderConfig,
applyOpencodeZenConfig,
@@ -511,6 +512,41 @@ describe("applyOpenrouterProviderConfig", () => {
});
});
// Verifies that applyLitellmProviderConfig merges into an existing provider
// entry rather than clobbering user configuration.
describe("applyLitellmProviderConfig", () => {
  it("preserves existing baseUrl and api key while adding the default model", () => {
    const cfg = applyLitellmProviderConfig({
      models: {
        providers: {
          litellm: {
            baseUrl: "https://litellm.example/v1",
            // Whitespace-padded key: expected to be trimmed, not dropped.
            apiKey: " old-key ",
            // Non-default api value: expected to be normalized below.
            api: "anthropic-messages",
            models: [
              {
                id: "custom-model",
                name: "Custom",
                reasoning: false,
                input: ["text"],
                cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
                contextWindow: 1000,
                maxTokens: 100,
              },
            ],
          },
        },
      },
    });
    expect(cfg.models?.providers?.litellm?.baseUrl).toBe("https://litellm.example/v1");
    // The api type is always forced to the OpenAI-compatible surface.
    expect(cfg.models?.providers?.litellm?.api).toBe("openai-completions");
    expect(cfg.models?.providers?.litellm?.apiKey).toBe("old-key");
    // The default model is appended after any user-defined models.
    expect(cfg.models?.providers?.litellm?.models.map((m) => m.id)).toEqual([
      "custom-model",
      "claude-opus-4-6",
    ]);
  });
});
describe("applyOpenrouterConfig", () => {
it("sets correct primary model", () => {
const cfg = applyOpenrouterConfig({});

View File

@@ -11,6 +11,8 @@ export {
applyQianfanProviderConfig,
applyKimiCodeConfig,
applyKimiCodeProviderConfig,
applyLitellmConfig,
applyLitellmProviderConfig,
applyMoonshotConfig,
applyMoonshotConfigCn,
applyMoonshotProviderConfig,
@@ -46,11 +48,13 @@ export {
} from "./onboard-auth.config-opencode.js";
export {
CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF,
LITELLM_DEFAULT_MODEL_REF,
OPENROUTER_DEFAULT_MODEL_REF,
setAnthropicApiKey,
setCloudflareAiGatewayConfig,
setQianfanApiKey,
setGeminiApiKey,
setLitellmApiKey,
setKimiCodingApiKey,
setMinimaxApiKey,
setMoonshotApiKey,

View File

@@ -0,0 +1,91 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { describe, expect, it, vi } from "vitest";
// Integration test for the non-interactive onboarding path with
// --auth-choice litellm-api-key: the CLI flag key must land in the credential
// store and the LiteLLM default model must become the primary agent model.
describe("onboard (non-interactive): LiteLLM", () => {
  it("stores the API key and configures the default model", async () => {
    // Snapshot every env var the flow touches so it can be restored afterwards.
    const prev = {
      home: process.env.HOME,
      stateDir: process.env.OPENCLAW_STATE_DIR,
      configPath: process.env.OPENCLAW_CONFIG_PATH,
      skipChannels: process.env.OPENCLAW_SKIP_CHANNELS,
      skipGmail: process.env.OPENCLAW_SKIP_GMAIL_WATCHER,
      skipCron: process.env.OPENCLAW_SKIP_CRON,
      skipCanvas: process.env.OPENCLAW_SKIP_CANVAS_HOST,
      token: process.env.OPENCLAW_GATEWAY_TOKEN,
      password: process.env.OPENCLAW_GATEWAY_PASSWORD,
    };
    // Restore helper: assigning `undefined` to process.env coerces it to the
    // string "undefined" in Node, so absent vars must be deleted instead.
    const restoreEnv = (key: string, value: string | undefined) => {
      if (value === undefined) {
        delete process.env[key];
      } else {
        process.env[key] = value;
      }
    };
    // Disable side-effectful subsystems so onboarding stays hermetic.
    process.env.OPENCLAW_SKIP_CHANNELS = "1";
    process.env.OPENCLAW_SKIP_GMAIL_WATCHER = "1";
    process.env.OPENCLAW_SKIP_CRON = "1";
    process.env.OPENCLAW_SKIP_CANVAS_HOST = "1";
    delete process.env.OPENCLAW_GATEWAY_TOKEN;
    delete process.env.OPENCLAW_GATEWAY_PASSWORD;
    // Point HOME/state/config at a throwaway directory.
    const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-onboard-litellm-"));
    process.env.HOME = tempHome;
    process.env.OPENCLAW_STATE_DIR = tempHome;
    process.env.OPENCLAW_CONFIG_PATH = path.join(tempHome, "openclaw.json");
    // Reset the module cache so config modules re-read the patched env vars.
    vi.resetModules();
    const runtime = {
      log: () => {},
      error: (msg: string) => {
        throw new Error(msg);
      },
      exit: (code: number) => {
        throw new Error(`exit:${code}`);
      },
    };
    try {
      const { runNonInteractiveOnboarding } = await import("./onboard-non-interactive.js");
      await runNonInteractiveOnboarding(
        {
          nonInteractive: true,
          authChoice: "litellm-api-key",
          litellmApiKey: "litellm-test-key",
          skipHealth: true,
          skipChannels: true,
          skipSkills: true,
          json: true,
        },
        runtime,
      );
      // The written config must reference the new api_key profile and model.
      const { CONFIG_PATH } = await import("../config/config.js");
      const cfg = JSON.parse(await fs.readFile(CONFIG_PATH, "utf8")) as {
        auth?: {
          profiles?: Record<string, { provider?: string; mode?: string }>;
        };
        agents?: { defaults?: { model?: { primary?: string } } };
      };
      expect(cfg.auth?.profiles?.["litellm:default"]?.provider).toBe("litellm");
      expect(cfg.auth?.profiles?.["litellm:default"]?.mode).toBe("api_key");
      expect(cfg.agents?.defaults?.model?.primary).toBe("litellm/claude-opus-4-6");
      // The raw key must land in the credential store, not the config file.
      const { ensureAuthProfileStore } = await import("../agents/auth-profiles.js");
      const store = ensureAuthProfileStore();
      const profile = store.profiles["litellm:default"];
      expect(profile?.type).toBe("api_key");
      if (profile?.type === "api_key") {
        expect(profile.provider).toBe("litellm");
        expect(profile.key).toBe("litellm-test-key");
      }
    } finally {
      // Always clean up and restore the environment, even on failure.
      await fs.rm(tempHome, { recursive: true, force: true });
      restoreEnv("HOME", prev.home);
      restoreEnv("OPENCLAW_STATE_DIR", prev.stateDir);
      restoreEnv("OPENCLAW_CONFIG_PATH", prev.configPath);
      restoreEnv("OPENCLAW_SKIP_CHANNELS", prev.skipChannels);
      restoreEnv("OPENCLAW_SKIP_GMAIL_WATCHER", prev.skipGmail);
      restoreEnv("OPENCLAW_SKIP_CRON", prev.skipCron);
      restoreEnv("OPENCLAW_SKIP_CANVAS_HOST", prev.skipCanvas);
      restoreEnv("OPENCLAW_GATEWAY_TOKEN", prev.token);
      restoreEnv("OPENCLAW_GATEWAY_PASSWORD", prev.password);
    }
  }, 60_000);
});

View File

@@ -23,6 +23,7 @@ type AuthChoiceFlagOptions = Pick<
| "minimaxApiKey"
| "opencodeZenApiKey"
| "xaiApiKey"
| "litellmApiKey"
>;
const AUTH_CHOICE_FLAG_MAP = [
@@ -45,6 +46,7 @@ const AUTH_CHOICE_FLAG_MAP = [
{ flag: "xaiApiKey", authChoice: "xai-api-key", label: "--xai-api-key" },
{ flag: "minimaxApiKey", authChoice: "minimax-api", label: "--minimax-api-key" },
{ flag: "opencodeZenApiKey", authChoice: "opencode-zen", label: "--opencode-zen-api-key" },
{ flag: "litellmApiKey", authChoice: "litellm-api-key", label: "--litellm-api-key" },
] satisfies ReadonlyArray<AuthChoiceFlag>;
export type AuthChoiceInference = {

View File

@@ -24,6 +24,7 @@ import {
applyVeniceConfig,
applyTogetherConfig,
applyVercelAiGatewayConfig,
applyLitellmConfig,
applyXaiConfig,
applyXiaomiConfig,
applyZaiConfig,
@@ -32,6 +33,7 @@ import {
setQianfanApiKey,
setGeminiApiKey,
setKimiCodingApiKey,
setLitellmApiKey,
setMinimaxApiKey,
setMoonshotApiKey,
setOpencodeZenApiKey,
@@ -314,6 +316,29 @@ export async function applyNonInteractiveAuthChoice(params: {
return applyOpenrouterConfig(nextConfig);
}
if (authChoice === "litellm-api-key") {
const resolved = await resolveNonInteractiveApiKey({
provider: "litellm",
cfg: baseConfig,
flagValue: opts.litellmApiKey,
flagName: "--litellm-api-key",
envVar: "LITELLM_API_KEY",
runtime,
});
if (!resolved) {
return null;
}
if (resolved.source !== "profile") {
await setLitellmApiKey(resolved.key);
}
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "litellm:default",
provider: "litellm",
mode: "api_key",
});
return applyLitellmConfig(nextConfig);
}
if (authChoice === "ai-gateway-api-key") {
const resolved = await resolveNonInteractiveApiKey({
provider: "vercel-ai-gateway",

View File

@@ -12,6 +12,7 @@ export type AuthChoice =
| "openai-codex"
| "openai-api-key"
| "openrouter-api-key"
| "litellm-api-key"
| "ai-gateway-api-key"
| "cloudflare-ai-gateway-api-key"
| "moonshot-api-key"
@@ -89,6 +90,7 @@ export type OnboardOptions = {
anthropicApiKey?: string;
openaiApiKey?: string;
openrouterApiKey?: string;
litellmApiKey?: string;
aiGatewayApiKey?: string;
cloudflareAiGatewayAccountId?: string;
cloudflareAiGatewayGatewayId?: string;