mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-08 07:11:25 +00:00
Config: expand Kilo catalog and persist selected Kilo models (#24921)
Merged via /review-pr -> /prepare-pr -> /merge-pr.
Prepared head SHA: f5a7e1a385
Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com>
Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com>
Reviewed-by: @gumadeiras
This commit is contained in:
committed by
GitHub
parent
6c441ea797
commit
5239b55c0a
@@ -33,6 +33,7 @@ let importPiSdk = defaultImportPiSdk;
|
||||
// Provider id under which OpenAI Codex model entries appear in the catalog.
const CODEX_PROVIDER = "openai-codex";
// Canonical GPT-5.3 Codex model id.
const OPENAI_CODEX_GPT53_MODEL_ID = "gpt-5.3-codex";
// GPT-5.3 Codex "spark" variant model id — presumably the fallback target used
// by applyOpenAICodexSparkFallback below; confirm against that function's body.
const OPENAI_CODEX_GPT53_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
// Providers whose models are not Pi-native: their catalog entries are read
// from user config (see readConfiguredOptInProviderModels) rather than the
// built-in catalog.
const NON_PI_NATIVE_MODEL_PROVIDERS = new Set(["kilocode"]);
function applyOpenAICodexSparkFallback(models: ModelCatalogEntry[]): void {
|
||||
const hasSpark = models.some(
|
||||
@@ -59,6 +60,89 @@ function applyOpenAICodexSparkFallback(models: ModelCatalogEntry[]): void {
|
||||
});
|
||||
}
|
||||
|
||||
function normalizeConfiguredModelInput(input: unknown): Array<"text" | "image"> | undefined {
|
||||
if (!Array.isArray(input)) {
|
||||
return undefined;
|
||||
}
|
||||
const normalized = input.filter(
|
||||
(item): item is "text" | "image" => item === "text" || item === "image",
|
||||
);
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
}
|
||||
|
||||
function readConfiguredOptInProviderModels(config: OpenClawConfig): ModelCatalogEntry[] {
|
||||
const providers = config.models?.providers;
|
||||
if (!providers || typeof providers !== "object") {
|
||||
return [];
|
||||
}
|
||||
|
||||
const out: ModelCatalogEntry[] = [];
|
||||
for (const [providerRaw, providerValue] of Object.entries(providers)) {
|
||||
const provider = providerRaw.toLowerCase().trim();
|
||||
if (!NON_PI_NATIVE_MODEL_PROVIDERS.has(provider)) {
|
||||
continue;
|
||||
}
|
||||
if (!providerValue || typeof providerValue !== "object") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const configuredModels = (providerValue as { models?: unknown }).models;
|
||||
if (!Array.isArray(configuredModels)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const configuredModel of configuredModels) {
|
||||
if (!configuredModel || typeof configuredModel !== "object") {
|
||||
continue;
|
||||
}
|
||||
const idRaw = (configuredModel as { id?: unknown }).id;
|
||||
if (typeof idRaw !== "string") {
|
||||
continue;
|
||||
}
|
||||
const id = idRaw.trim();
|
||||
if (!id) {
|
||||
continue;
|
||||
}
|
||||
const rawName = (configuredModel as { name?: unknown }).name;
|
||||
const name = (typeof rawName === "string" ? rawName : id).trim() || id;
|
||||
const contextWindowRaw = (configuredModel as { contextWindow?: unknown }).contextWindow;
|
||||
const contextWindow =
|
||||
typeof contextWindowRaw === "number" && contextWindowRaw > 0 ? contextWindowRaw : undefined;
|
||||
const reasoningRaw = (configuredModel as { reasoning?: unknown }).reasoning;
|
||||
const reasoning = typeof reasoningRaw === "boolean" ? reasoningRaw : undefined;
|
||||
const input = normalizeConfiguredModelInput((configuredModel as { input?: unknown }).input);
|
||||
out.push({ id, name, provider, contextWindow, reasoning, input });
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
function mergeConfiguredOptInProviderModels(params: {
|
||||
config: OpenClawConfig;
|
||||
models: ModelCatalogEntry[];
|
||||
}): void {
|
||||
const configured = readConfiguredOptInProviderModels(params.config);
|
||||
if (configured.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const seen = new Set(
|
||||
params.models.map(
|
||||
(entry) => `${entry.provider.toLowerCase().trim()}::${entry.id.toLowerCase().trim()}`,
|
||||
),
|
||||
);
|
||||
|
||||
for (const entry of configured) {
|
||||
const key = `${entry.provider.toLowerCase().trim()}::${entry.id.toLowerCase().trim()}`;
|
||||
if (seen.has(key)) {
|
||||
continue;
|
||||
}
|
||||
params.models.push(entry);
|
||||
seen.add(key);
|
||||
}
|
||||
}
|
||||
|
||||
export function resetModelCatalogCacheForTest() {
|
||||
modelCatalogPromise = null;
|
||||
hasLoggedModelCatalogError = false;
|
||||
@@ -142,6 +226,7 @@ export async function loadModelCatalog(params?: {
|
||||
const input = Array.isArray(entry?.input) ? entry.input : undefined;
|
||||
models.push({ id, name, provider, contextWindow, reasoning, input });
|
||||
}
|
||||
mergeConfiguredOptInProviderModels({ config: cfg, models });
|
||||
applyOpenAICodexSparkFallback(models);
|
||||
|
||||
if (models.length === 0) {
|
||||
|
||||
Reference in New Issue
Block a user