Config: expand Kilo catalog and persist selected Kilo models (#24921)

Merged via /review-pr -> /prepare-pr -> /merge-pr.

Prepared head SHA: f5a7e1a385
Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com>
Reviewed-by: @gumadeiras
This commit is contained in:
Gustavo Madeira Santana
2026-02-23 21:17:37 -05:00
committed by GitHub
parent 6c441ea797
commit 5239b55c0a
14 changed files with 668 additions and 21 deletions

View File

@@ -103,4 +103,124 @@ describe("loadModelCatalog", () => {
expect(spark?.name).toBe("gpt-5.3-codex-spark");
expect(spark?.reasoning).toBe(true);
});
it("merges configured models for opted-in non-pi-native providers", async () => {
__setModelCatalogImportForTest(
async () =>
({
AuthStorage: class {},
ModelRegistry: class {
getAll() {
return [{ id: "gpt-4.1", provider: "openai", name: "GPT-4.1" }];
}
},
}) as unknown as PiSdkModule,
);
const result = await loadModelCatalog({
config: {
models: {
providers: {
kilocode: {
models: [
{
id: "google/gemini-3-pro-preview",
name: "Gemini 3 Pro Preview",
input: ["text", "image"],
reasoning: true,
contextWindow: 1048576,
},
],
},
},
},
} as OpenClawConfig,
});
expect(result).toContainEqual(
expect.objectContaining({
provider: "kilocode",
id: "google/gemini-3-pro-preview",
name: "Gemini 3 Pro Preview",
}),
);
});
it("does not merge configured models for providers that are not opted in", async () => {
__setModelCatalogImportForTest(
async () =>
({
AuthStorage: class {},
ModelRegistry: class {
getAll() {
return [{ id: "gpt-4.1", provider: "openai", name: "GPT-4.1" }];
}
},
}) as unknown as PiSdkModule,
);
const result = await loadModelCatalog({
config: {
models: {
providers: {
qianfan: {
models: [
{
id: "deepseek-v3.2",
name: "DEEPSEEK V3.2",
},
],
},
},
},
} as OpenClawConfig,
});
expect(
result.some((entry) => entry.provider === "qianfan" && entry.id === "deepseek-v3.2"),
).toBe(false);
});
it("does not duplicate opted-in configured models already present in ModelRegistry", async () => {
__setModelCatalogImportForTest(
async () =>
({
AuthStorage: class {},
ModelRegistry: class {
getAll() {
return [
{
id: "anthropic/claude-opus-4.6",
provider: "kilocode",
name: "Claude Opus 4.6",
},
];
}
},
}) as unknown as PiSdkModule,
);
const result = await loadModelCatalog({
config: {
models: {
providers: {
kilocode: {
models: [
{
id: "anthropic/claude-opus-4.6",
name: "Configured Claude Opus 4.6",
},
],
},
},
},
} as OpenClawConfig,
});
const matches = result.filter(
(entry) => entry.provider === "kilocode" && entry.id === "anthropic/claude-opus-4.6",
);
expect(matches).toHaveLength(1);
expect(matches[0]?.name).toBe("Claude Opus 4.6");
});
});

View File

@@ -33,6 +33,7 @@ let importPiSdk = defaultImportPiSdk;
// Provider id under which OpenAI Codex models appear in the catalog.
const CODEX_PROVIDER = "openai-codex";
// Codex model id checked by applyOpenAICodexSparkFallback below.
const OPENAI_CODEX_GPT53_MODEL_ID = "gpt-5.3-codex";
// Spark variant id used by the fallback logic when it is absent from the catalog.
const OPENAI_CODEX_GPT53_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
// Providers not served natively by the pi SDK; models configured for these
// providers are merged into the catalog by mergeConfiguredOptInProviderModels.
const NON_PI_NATIVE_MODEL_PROVIDERS = new Set(["kilocode"]);
function applyOpenAICodexSparkFallback(models: ModelCatalogEntry[]): void {
const hasSpark = models.some(
@@ -59,6 +60,89 @@ function applyOpenAICodexSparkFallback(models: ModelCatalogEntry[]): void {
});
}
/**
 * Narrows an untyped config value to the supported input modalities.
 * Returns undefined when the value is not an array or when no supported
 * modality ("text" / "image") is present, so callers can omit the field.
 */
function normalizeConfiguredModelInput(input: unknown): Array<"text" | "image"> | undefined {
  if (!Array.isArray(input)) {
    return undefined;
  }
  const modalities: Array<"text" | "image"> = [];
  for (const candidate of input) {
    if (candidate === "text" || candidate === "image") {
      modalities.push(candidate);
    }
  }
  return modalities.length === 0 ? undefined : modalities;
}
/**
 * Collects model entries declared in user config for providers on the
 * opt-in (non-pi-native) list.
 *
 * Validation per entry: a non-empty string id is required (trimmed);
 * name falls back to the id when missing or blank; contextWindow must be a
 * positive number; reasoning must be a boolean; input is narrowed via
 * normalizeConfiguredModelInput. Anything else is silently skipped.
 */
function readConfiguredOptInProviderModels(config: OpenClawConfig): ModelCatalogEntry[] {
  const providers = config.models?.providers;
  if (!providers || typeof providers !== "object") {
    return [];
  }
  const entries: ModelCatalogEntry[] = [];
  for (const [providerKey, providerValue] of Object.entries(providers)) {
    const provider = providerKey.toLowerCase().trim();
    // Only opted-in providers participate; everything else is ignored here.
    if (!NON_PI_NATIVE_MODEL_PROVIDERS.has(provider)) {
      continue;
    }
    if (!providerValue || typeof providerValue !== "object") {
      continue;
    }
    const configuredModels = (providerValue as { models?: unknown }).models;
    if (!Array.isArray(configuredModels)) {
      continue;
    }
    for (const candidate of configuredModels) {
      if (!candidate || typeof candidate !== "object") {
        continue;
      }
      const record = candidate as {
        id?: unknown;
        name?: unknown;
        contextWindow?: unknown;
        reasoning?: unknown;
        input?: unknown;
      };
      if (typeof record.id !== "string") {
        continue;
      }
      const id = record.id.trim();
      if (!id) {
        continue;
      }
      // Blank or missing names fall back to the model id.
      const name = (typeof record.name === "string" ? record.name : id).trim() || id;
      const contextWindow =
        typeof record.contextWindow === "number" && record.contextWindow > 0
          ? record.contextWindow
          : undefined;
      const reasoning = typeof record.reasoning === "boolean" ? record.reasoning : undefined;
      const input = normalizeConfiguredModelInput(record.input);
      entries.push({ id, name, provider, contextWindow, reasoning, input });
    }
  }
  return entries;
}
/**
 * Appends configured opt-in provider models to `params.models` in place.
 * Entries whose provider/id pair (case-insensitive, trimmed) already exists
 * in the catalog are skipped, so registry-provided entries take precedence
 * over config-declared duplicates.
 */
function mergeConfiguredOptInProviderModels(params: {
  config: OpenClawConfig;
  models: ModelCatalogEntry[];
}): void {
  const configured = readConfiguredOptInProviderModels(params.config);
  if (configured.length === 0) {
    return;
  }
  // Dedupe key: normalized provider + id.
  const keyFor = (entry: ModelCatalogEntry) =>
    `${entry.provider.toLowerCase().trim()}::${entry.id.toLowerCase().trim()}`;
  const seen = new Set(params.models.map(keyFor));
  for (const candidate of configured) {
    const key = keyFor(candidate);
    if (!seen.has(key)) {
      seen.add(key);
      params.models.push(candidate);
    }
  }
}
export function resetModelCatalogCacheForTest() {
modelCatalogPromise = null;
hasLoggedModelCatalogError = false;
@@ -142,6 +226,7 @@ export async function loadModelCatalog(params?: {
const input = Array.isArray(entry?.input) ? entry.input : undefined;
models.push({ id, name, provider, contextWindow, reasoning, input });
}
mergeConfiguredOptInProviderModels({ config: cfg, models });
applyOpenAICodexSparkFallback(models);
if (models.length === 0) {

View File

@@ -5,6 +5,18 @@ import { describe, expect, it } from "vitest";
import { captureEnv } from "../test-utils/env.js";
import { buildKilocodeProvider, resolveImplicitProviders } from "./models-config.providers.js";
// Model ids the built-in Kilo Gateway provider is expected to surface.
// NOTE(review): presumably mirrors KILOCODE_MODEL_CATALOG in
// providers/kilocode-shared — confirm the two lists stay in sync.
const KILOCODE_MODEL_IDS = [
"anthropic/claude-opus-4.6",
"z-ai/glm-5:free",
"minimax/minimax-m2.5:free",
"anthropic/claude-sonnet-4.5",
"openai/gpt-5.2",
"google/gemini-3-pro-preview",
"google/gemini-3-flash-preview",
"x-ai/grok-code-fast-1",
"moonshotai/kimi-k2.5",
];
describe("Kilo Gateway implicit provider", () => {
it("should include kilocode when KILOCODE_API_KEY is configured", async () => {
const agentDir = mkdtempSync(join(tmpdir(), "openclaw-test-"));
@@ -46,4 +58,12 @@ describe("Kilo Gateway implicit provider", () => {
const modelIds = provider.models.map((m) => m.id);
expect(modelIds).toContain("anthropic/claude-opus-4.6");
});
it("should include the full surfaced model catalog", () => {
const provider = buildKilocodeProvider();
const modelIds = provider.models.map((m) => m.id);
for (const modelId of KILOCODE_MODEL_IDS) {
expect(modelIds).toContain(modelId);
}
});
});

View File

@@ -10,8 +10,7 @@ import {
KILOCODE_DEFAULT_CONTEXT_WINDOW,
KILOCODE_DEFAULT_COST,
KILOCODE_DEFAULT_MAX_TOKENS,
KILOCODE_DEFAULT_MODEL_ID,
KILOCODE_DEFAULT_MODEL_NAME,
KILOCODE_MODEL_CATALOG,
} from "../providers/kilocode-shared.js";
import { ensureAuthProfileStore, listProfilesForProvider } from "./auth-profiles.js";
import { discoverBedrockModels } from "./bedrock-discovery.js";
@@ -776,17 +775,15 @@ export function buildKilocodeProvider(): ProviderConfig {
return {
baseUrl: KILOCODE_BASE_URL,
api: "openai-completions",
// NOTE(review): the two `models:` blocks below are the before/after sides of
// a rendered diff (hunk header says -17/+15 lines): the removed hard-coded
// single default model, then the added catalog-driven mapping. Only the
// KILOCODE_MODEL_CATALOG.map(...) version is live code after the change.
models: [
{
id: KILOCODE_DEFAULT_MODEL_ID,
name: KILOCODE_DEFAULT_MODEL_NAME,
reasoning: true,
input: ["text", "image"],
cost: KILOCODE_DEFAULT_COST,
contextWindow: KILOCODE_DEFAULT_CONTEXT_WINDOW,
maxTokens: KILOCODE_DEFAULT_MAX_TOKENS,
},
],
// Each catalog model gets the shared default cost; context window and max
// tokens fall back to the shared defaults when not set on the model.
models: KILOCODE_MODEL_CATALOG.map((model) => ({
id: model.id,
name: model.name,
reasoning: model.reasoning,
input: model.input,
cost: KILOCODE_DEFAULT_COST,
contextWindow: model.contextWindow ?? KILOCODE_DEFAULT_CONTEXT_WINDOW,
maxTokens: model.maxTokens ?? KILOCODE_DEFAULT_MAX_TOKENS,
})),
};
}