chore: merge origin/main into main

This commit is contained in:
Peter Steinberger
2026-02-22 13:42:52 +00:00
304 changed files with 17041 additions and 5502 deletions

View File

@@ -206,7 +206,13 @@ describe("callGateway url resolution", () => {
{
label: "keeps legacy admin scopes for explicit CLI callers",
call: () => callGatewayCli({ method: "health" }),
expectedScopes: ["operator.admin", "operator.approvals", "operator.pairing"],
expectedScopes: [
"operator.admin",
"operator.read",
"operator.write",
"operator.approvals",
"operator.pairing",
],
},
])("scope selection: $label", async ({ call, expectedScopes }) => {
setLocalLoopbackGatewayConfig();
@@ -328,6 +334,8 @@ describe("buildGatewayConnectionDetails", () => {
expect((thrown as Error).message).toContain("SECURITY ERROR");
expect((thrown as Error).message).toContain("plaintext ws://");
expect((thrown as Error).message).toContain("wss://");
expect((thrown as Error).message).toContain("Tailscale Serve/Funnel");
expect((thrown as Error).message).toContain("openclaw doctor --fix");
});
it("allows ws:// for loopback addresses in local mode", () => {

View File

@@ -149,7 +149,12 @@ export function buildGatewayConnectionDetails(
"Both credentials and chat data would be exposed to network interception.",
`Source: ${urlSource}`,
`Config: ${configPath}`,
"Fix: Use wss:// for the gateway URL, or connect via SSH tunnel to localhost.",
"Fix: Use wss:// for remote gateway URLs.",
"Safe remote access defaults:",
"- keep gateway.bind=loopback and use an SSH tunnel (ssh -N -L 18789:127.0.0.1:18789 user@gateway-host)",
"- or use Tailscale Serve/Funnel for HTTPS remote access",
"Doctor: openclaw doctor --fix",
"Docs: https://docs.openclaw.ai/gateway/remote",
].join("\n"),
);
}

View File

@@ -130,6 +130,9 @@ describe("GatewayClient security checks", () => {
message: expect.stringContaining("SECURITY ERROR"),
}),
);
const error = onConnectError.mock.calls[0]?.[0] as Error;
expect(error.message).toContain("openclaw doctor --fix");
expect(error.message).toContain("Tailscale Serve/Funnel");
expect(wsInstances.length).toBe(0); // No WebSocket created
client.stop();
});
@@ -149,6 +152,8 @@ describe("GatewayClient security checks", () => {
message: expect.stringContaining("SECURITY ERROR"),
}),
);
const error = onConnectError.mock.calls[0]?.[0] as Error;
expect(error.message).toContain("openclaw doctor --fix");
expect(wsInstances.length).toBe(0); // No WebSocket created
client.stop();
});

View File

@@ -126,7 +126,9 @@ export class GatewayClient {
const error = new Error(
`SECURITY ERROR: Cannot connect to "${displayHost}" over plaintext ws://. ` +
"Both credentials and chat data would be exposed to network interception. " +
"Use wss:// for the gateway URL, or connect via SSH tunnel to localhost.",
"Use wss:// for remote URLs. Safe defaults: keep gateway.bind=loopback and connect via SSH tunnel " +
"(ssh -N -L 18789:127.0.0.1:18789 user@gateway-host), or use Tailscale Serve/Funnel. " +
"Run `openclaw doctor --fix` for guidance.",
);
this.opts.onConnectError?.(error);
return;

View File

@@ -13,6 +13,8 @@ export type OperatorScope =
export const CLI_DEFAULT_OPERATOR_SCOPES: OperatorScope[] = [
ADMIN_SCOPE,
READ_SCOPE,
WRITE_SCOPE,
APPROVALS_SCOPE,
PAIRING_SCOPE,
];

View File

@@ -334,6 +334,21 @@ describe("OpenAI-compatible HTTP API (e2e)", () => {
expect(msg.content).toBe("hello");
}
{
agentCommand.mockClear();
agentCommand.mockResolvedValueOnce({ payloads: [{ text: "" }] } as never);
const res = await postChatCompletions(port, {
stream: false,
model: "openclaw",
messages: [{ role: "user", content: "hi" }],
});
expect(res.status).toBe(200);
const json = (await res.json()) as Record<string, unknown>;
const choice0 = (json.choices as Array<Record<string, unknown>>)[0] ?? {};
const msg = (choice0.message as Record<string, unknown> | undefined) ?? {};
expect(msg.content).toBe("No response from OpenClaw.");
}
{
const res = await postChatCompletions(port, {
model: "openclaw",
@@ -475,6 +490,31 @@ describe("OpenAI-compatible HTTP API (e2e)", () => {
expect(fallbackText).toContain("[DONE]");
expect(fallbackText).toContain("hello");
}
{
agentCommand.mockClear();
agentCommand.mockRejectedValueOnce(new Error("boom"));
const errorRes = await postChatCompletions(port, {
stream: true,
model: "openclaw",
messages: [{ role: "user", content: "hi" }],
});
expect(errorRes.status).toBe(200);
const errorText = await errorRes.text();
const errorData = parseSseDataLines(errorText);
expect(errorData[errorData.length - 1]).toBe("[DONE]");
const errorChunks = errorData
.filter((d) => d !== "[DONE]")
.map((d) => JSON.parse(d) as Record<string, unknown>);
const stopChoice = errorChunks
.flatMap((c) => (c.choices as Array<Record<string, unknown>> | undefined) ?? [])
.find((choice) => choice.finish_reason === "stop");
expect((stopChoice?.delta as Record<string, unknown> | undefined)?.content).toBe(
"Error: internal error",
);
}
} finally {
// shared server
}

View File

@@ -41,6 +41,51 @@ function writeSse(res: ServerResponse, data: unknown) {
res.write(`data: ${JSON.stringify(data)}\n\n`);
}
/**
 * Assembles the shared input object handed to `agentCommand` by both the
 * streaming and non-streaming chat-completion paths.
 */
function buildAgentCommandInput(params: {
  prompt: { message: string; extraSystemPrompt?: string };
  sessionKey: string;
  runId: string;
}) {
  const { prompt, sessionKey, runId } = params;
  return {
    message: prompt.message,
    extraSystemPrompt: prompt.extraSystemPrompt,
    sessionKey,
    runId,
    // The HTTP API answers inline over the response; never deliver via a channel.
    deliver: false as const,
    messageChannel: "webchat" as const,
    bestEffortDeliver: false as const,
  };
}
/** Emits the initial SSE chunk that announces the assistant role for a run. */
function writeAssistantRoleChunk(res: ServerResponse, params: { runId: string; model: string }) {
  const chunk = {
    id: params.runId,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: params.model,
    choices: [{ index: 0, delta: { role: "assistant" } }],
  };
  writeSse(res, chunk);
}
/**
 * Emits an SSE content-delta chunk for a run.
 * `finishReason` is "stop" on the terminal chunk and null for intermediate deltas.
 */
function writeAssistantContentChunk(
  res: ServerResponse,
  params: { runId: string; model: string; content: string; finishReason: "stop" | null },
) {
  const { runId, model, content, finishReason } = params;
  writeSse(res, {
    id: runId,
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [{ index: 0, delta: { content }, finish_reason: finishReason }],
  });
}
/** Coerces an unknown value to a message array; any non-array becomes empty. */
function asMessages(val: unknown): OpenAiChatMessage[] {
  if (!Array.isArray(val)) {
    return [];
  }
  return val as OpenAiChatMessage[];
}
@@ -194,22 +239,15 @@ export async function handleOpenAiHttpRequest(
const runId = `chatcmpl_${randomUUID()}`;
const deps = createDefaultDeps();
const commandInput = buildAgentCommandInput({
prompt,
sessionKey,
runId,
});
if (!stream) {
try {
const result = await agentCommand(
{
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
deliver: false,
messageChannel: "webchat",
bestEffortDeliver: false,
},
defaultRuntime,
deps,
);
const result = await agentCommand(commandInput, defaultRuntime, deps);
const content = resolveAgentResponseText(result);
@@ -258,28 +296,15 @@ export async function handleOpenAiHttpRequest(
if (!wroteRole) {
wroteRole = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
});
writeAssistantRoleChunk(res, { runId, model });
}
sawAssistantDelta = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
writeAssistantContentChunk(res, {
runId,
model,
choices: [
{
index: 0,
delta: { content },
finish_reason: null,
},
],
content,
finishReason: null,
});
return;
}
@@ -302,19 +327,7 @@ export async function handleOpenAiHttpRequest(
void (async () => {
try {
const result = await agentCommand(
{
message: prompt.message,
extraSystemPrompt: prompt.extraSystemPrompt,
sessionKey,
runId,
deliver: false,
messageChannel: "webchat",
bestEffortDeliver: false,
},
defaultRuntime,
deps,
);
const result = await agentCommand(commandInput, defaultRuntime, deps);
if (closed) {
return;
@@ -323,30 +336,17 @@ export async function handleOpenAiHttpRequest(
if (!sawAssistantDelta) {
if (!wroteRole) {
wroteRole = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
model,
choices: [{ index: 0, delta: { role: "assistant" } }],
});
writeAssistantRoleChunk(res, { runId, model });
}
const content = resolveAgentResponseText(result);
sawAssistantDelta = true;
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
writeAssistantContentChunk(res, {
runId,
model,
choices: [
{
index: 0,
delta: { content },
finish_reason: null,
},
],
content,
finishReason: null,
});
}
} catch (err) {
@@ -354,18 +354,11 @@ export async function handleOpenAiHttpRequest(
if (closed) {
return;
}
writeSse(res, {
id: runId,
object: "chat.completion.chunk",
created: Math.floor(Date.now() / 1000),
writeAssistantContentChunk(res, {
runId,
model,
choices: [
{
index: 0,
delta: { content: "Error: internal error" },
finish_reason: "stop",
},
],
content: "Error: internal error",
finishReason: "stop",
});
emitAgentEvent({
runId,

View File

@@ -217,7 +217,7 @@ export const agentHandlers: GatewayRequestHandlers = {
}
const normalizedAttachments = normalizeRpcAttachmentsToChatAttachments(request.attachments);
let message = request.message.trim();
let message = (request.message ?? "").trim();
let images: Array<{ type: "image"; data: string; mimeType: string }> = [];
if (normalizedAttachments.length > 0) {
try {
@@ -695,7 +695,7 @@ export const agentHandlers: GatewayRequestHandlers = {
return;
}
const p = params;
const runId = p.runId.trim();
const runId = (p.runId ?? "").trim();
const timeoutMs =
typeof p.timeoutMs === "number" && Number.isFinite(p.timeoutMs)
? Math.max(0, Math.floor(p.timeoutMs))

View File

@@ -1,41 +1,28 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { CURRENT_SESSION_VERSION } from "@mariozechner/pi-coding-agent";
import { describe, expect, it, vi } from "vitest";
import { createMockSessionEntry, createTranscriptFixtureSync } from "./chat.test-helpers.js";
import type { GatewayRequestContext } from "./types.js";
// Guardrail: Ensure gateway "injected" assistant transcript messages are appended via SessionManager,
// so they are attached to the current leaf with a `parentId` and do not sever compaction history.
describe("gateway chat.inject transcript writes", () => {
it("appends a Pi session entry that includes parentId", async () => {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-chat-inject-"));
const transcriptPath = path.join(dir, "sess.jsonl");
// Minimal Pi session header so SessionManager can open/append safely.
fs.writeFileSync(
transcriptPath,
`${JSON.stringify({
type: "session",
version: CURRENT_SESSION_VERSION,
id: "sess-1",
timestamp: new Date(0).toISOString(),
cwd: "/tmp",
})}\n`,
"utf-8",
);
const sessionId = "sess-1";
const { transcriptPath } = createTranscriptFixtureSync({
prefix: "openclaw-chat-inject-",
sessionId,
});
vi.doMock("../session-utils.js", async (importOriginal) => {
const original = await importOriginal<typeof import("../session-utils.js")>();
return {
...original,
loadSessionEntry: () => ({
storePath: path.join(dir, "sessions.json"),
entry: {
sessionId: "sess-1",
sessionFile: transcriptPath,
},
}),
loadSessionEntry: () =>
createMockSessionEntry({
transcriptPath,
sessionId,
canonicalKey: "k1",
}),
};
});

View File

@@ -0,0 +1,42 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { CURRENT_SESSION_VERSION } from "@mariozechner/pi-coding-agent";
/**
 * Creates a temp directory with a minimal Pi session transcript: a JSONL file
 * whose first line is the session header, so SessionManager can open and
 * append to it safely in tests.
 */
export function createTranscriptFixtureSync(params: {
  prefix: string;
  sessionId: string;
  fileName?: string;
}) {
  const dir = fs.mkdtempSync(path.join(os.tmpdir(), params.prefix));
  const transcriptPath = path.join(dir, params.fileName ?? "sess.jsonl");
  const header = {
    type: "session",
    version: CURRENT_SESSION_VERSION,
    id: params.sessionId,
    timestamp: new Date(0).toISOString(),
    cwd: "/tmp",
  };
  fs.writeFileSync(transcriptPath, `${JSON.stringify(header)}\n`, "utf-8");
  return { dir, transcriptPath };
}
/**
 * Builds a mock `loadSessionEntry` result pointing at the given transcript.
 * The sessions store path is placed next to the transcript file.
 */
export function createMockSessionEntry(params: {
  transcriptPath: string;
  sessionId: string;
  canonicalKey?: string;
  cfg?: Record<string, unknown>;
}) {
  const { transcriptPath, sessionId } = params;
  const storeDir = path.dirname(transcriptPath);
  return {
    cfg: params.cfg ?? {},
    storePath: path.join(storeDir, "sessions.json"),
    entry: { sessionId, sessionFile: transcriptPath },
    canonicalKey: params.canonicalKey ?? "main",
  };
}

View File

@@ -10,7 +10,10 @@ import type { MsgContext } from "../../auto-reply/templating.js";
import { createReplyPrefixOptions } from "../../channels/reply-prefix.js";
import { resolveSessionFilePath } from "../../config/sessions.js";
import { resolveSendPolicy } from "../../sessions/send-policy.js";
import { stripInlineDirectiveTagsForDisplay } from "../../utils/directive-tags.js";
import {
stripInlineDirectiveTagsForDisplay,
stripInlineDirectiveTagsFromMessageForDisplay,
} from "../../utils/directive-tags.js";
import { INTERNAL_MESSAGE_CHANNEL } from "../../utils/message-channel.js";
import {
abortChatRunById,
@@ -527,25 +530,6 @@ function nextChatSeq(context: { agentRunSeq: Map<string, number> }, runId: strin
return next;
}
/**
 * Returns a copy of `message` whose text content parts have inline directive
 * tags stripped for display. Messages without array content, and non-text
 * parts, pass through unchanged.
 */
function stripMessageDirectiveTags(
  message: Record<string, unknown> | undefined,
): Record<string, unknown> | undefined {
  if (!message || !Array.isArray(message.content)) {
    return message;
  }
  const cleaned = (message.content as Array<Record<string, unknown>>).map((part) => {
    if (part.type !== "text" || typeof part.text !== "string") {
      return part;
    }
    return { ...part, text: stripInlineDirectiveTagsForDisplay(part.text).text };
  });
  return { ...message, content: cleaned };
}
function broadcastChatFinal(params: {
context: Pick<GatewayRequestContext, "broadcast" | "nodeSendToSession" | "agentRunSeq">;
runId: string;
@@ -558,7 +542,7 @@ function broadcastChatFinal(params: {
sessionKey: params.sessionKey,
seq,
state: "final" as const,
message: stripMessageDirectiveTags(params.message),
message: stripInlineDirectiveTagsFromMessageForDisplay(params.message),
};
params.context.broadcast("chat", payload);
params.context.nodeSendToSession(params.sessionKey, "chat", payload);
@@ -1089,7 +1073,7 @@ export const chatHandlers: GatewayRequestHandlers = {
sessionKey: rawSessionKey,
seq: 0,
state: "final" as const,
message: stripMessageDirectiveTags(appended.message),
message: stripInlineDirectiveTagsFromMessageForDisplay(appended.message),
};
context.broadcast("chat", chatPayload);
context.nodeSendToSession(rawSessionKey, "chat", chatPayload);

View File

@@ -52,6 +52,7 @@ vi.mock("./session-utils.js", () => ({
import type { CliDeps } from "../cli/deps.js";
import { agentCommand } from "../commands/agent.js";
import type { HealthSummary } from "../commands/health.js";
import { loadConfig } from "../config/config.js";
import { updateSessionStore } from "../config/sessions.js";
import { requestHeartbeatNow } from "../infra/heartbeat-wake.js";
import { enqueueSystemEvent } from "../infra/system-events.js";
@@ -61,6 +62,7 @@ import { loadSessionEntry } from "./session-utils.js";
const enqueueSystemEventMock = vi.mocked(enqueueSystemEvent);
const requestHeartbeatNowMock = vi.mocked(requestHeartbeatNow);
const loadConfigMock = vi.mocked(loadConfig);
const agentCommandMock = vi.mocked(agentCommand);
const updateSessionStoreMock = vi.mocked(updateSessionStore);
const loadSessionEntryMock = vi.mocked(loadSessionEntry);
@@ -185,6 +187,65 @@ describe("node exec events", () => {
);
expect(requestHeartbeatNowMock).toHaveBeenCalledWith({ reason: "exec-event" });
});
it("suppresses exec.started when notifyOnExit is false", async () => {
loadConfigMock.mockReturnValueOnce({
session: { mainKey: "agent:main:main" },
tools: { exec: { notifyOnExit: false } },
} as ReturnType<typeof loadConfig>);
const ctx = buildCtx();
await handleNodeEvent(ctx, "node-1", {
event: "exec.started",
payloadJSON: JSON.stringify({
sessionKey: "agent:main:main",
runId: "run-silent-1",
command: "ls -la",
}),
});
expect(enqueueSystemEventMock).not.toHaveBeenCalled();
expect(requestHeartbeatNowMock).not.toHaveBeenCalled();
});
it("suppresses exec.finished when notifyOnExit is false", async () => {
loadConfigMock.mockReturnValueOnce({
session: { mainKey: "agent:main:main" },
tools: { exec: { notifyOnExit: false } },
} as ReturnType<typeof loadConfig>);
const ctx = buildCtx();
await handleNodeEvent(ctx, "node-2", {
event: "exec.finished",
payloadJSON: JSON.stringify({
runId: "run-silent-2",
exitCode: 0,
timedOut: false,
output: "some output",
}),
});
expect(enqueueSystemEventMock).not.toHaveBeenCalled();
expect(requestHeartbeatNowMock).not.toHaveBeenCalled();
});
it("suppresses exec.denied when notifyOnExit is false", async () => {
loadConfigMock.mockReturnValueOnce({
session: { mainKey: "agent:main:main" },
tools: { exec: { notifyOnExit: false } },
} as ReturnType<typeof loadConfig>);
const ctx = buildCtx();
await handleNodeEvent(ctx, "node-3", {
event: "exec.denied",
payloadJSON: JSON.stringify({
sessionKey: "agent:demo:main",
runId: "run-silent-3",
command: "rm -rf /",
reason: "allowlist-miss",
}),
});
expect(enqueueSystemEventMock).not.toHaveBeenCalled();
expect(requestHeartbeatNowMock).not.toHaveBeenCalled();
});
});
describe("voice transcript events", () => {

View File

@@ -471,6 +471,15 @@ export const handleNodeEvent = async (ctx: NodeEventContext, nodeId: string, evt
if (!sessionKey) {
return;
}
// Respect tools.exec.notifyOnExit setting (default: true)
// When false, skip system event notifications for node exec events.
const cfg = loadConfig();
const notifyOnExit = cfg.tools?.exec?.notifyOnExit !== false;
if (!notifyOnExit) {
return;
}
const runId = typeof obj.runId === "string" ? obj.runId.trim() : "";
const command = typeof obj.command === "string" ? obj.command.trim() : "";
const exitCode =

View File

@@ -42,4 +42,25 @@ describe("gateway startup log", () => {
expect(warn).not.toHaveBeenCalled();
});
it("logs all listen endpoints on a single line", () => {
const info = vi.fn();
const warn = vi.fn();
logGatewayStartup({
cfg: {},
bindHost: "127.0.0.1",
bindHosts: ["127.0.0.1", "::1"],
port: 18789,
log: { info, warn },
isNixMode: false,
});
const listenMessages = info.mock.calls
.map((call) => call[0])
.filter((message) => message.startsWith("listening on "));
expect(listenMessages).toEqual([
`listening on ws://127.0.0.1:18789, ws://[::1]:18789 (PID ${process.pid})`,
]);
});
});

View File

@@ -27,13 +27,8 @@ export function logGatewayStartup(params: {
const formatHost = (host: string) => (host.includes(":") ? `[${host}]` : host);
const hosts =
params.bindHosts && params.bindHosts.length > 0 ? params.bindHosts : [params.bindHost];
const primaryHost = hosts[0] ?? params.bindHost;
params.log.info(
`listening on ${scheme}://${formatHost(primaryHost)}:${params.port} (PID ${process.pid})`,
);
for (const host of hosts.slice(1)) {
params.log.info(`listening on ${scheme}://${formatHost(host)}:${params.port}`);
}
const listenEndpoints = hosts.map((host) => `${scheme}://${formatHost(host)}:${params.port}`);
params.log.info(`listening on ${listenEndpoints.join(", ")} (PID ${process.pid})`);
params.log.info(`log file: ${getResolvedLoggerSettings().file}`);
if (params.isNixMode) {
params.log.info("gateway: running in Nix mode (config managed externally)");

View File

@@ -873,7 +873,13 @@ describe("gateway server auth/connect", () => {
const { randomUUID } = await import("node:crypto");
const os = await import("node:os");
const path = await import("node:path");
const scopes = ["operator.admin", "operator.approvals", "operator.pairing"];
const scopes = [
"operator.admin",
"operator.read",
"operator.write",
"operator.approvals",
"operator.pairing",
];
const { device } = await createSignedDevice({
token: "secret",
scopes,

View File

@@ -82,7 +82,106 @@ const whatsappRegistry = createRegistry([
]);
const emptyRegistry = createRegistry([]);
type ModelCatalogRpcEntry = {
id: string;
name: string;
provider: string;
contextWindow?: number;
};
type PiCatalogFixtureEntry = {
id: string;
provider: string;
name?: string;
contextWindow?: number;
};
const buildPiCatalogFixture = (): PiCatalogFixtureEntry[] => [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
const expectedSortedCatalog = (): ModelCatalogRpcEntry[] => [
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
];
describe("gateway server models + voicewake", () => {
const listModels = async () => rpcReq<{ models: ModelCatalogRpcEntry[] }>(ws, "models.list");
const seedPiCatalog = () => {
piSdkMock.enabled = true;
piSdkMock.models = buildPiCatalogFixture();
};
const withModelsConfig = async <T>(config: unknown, run: () => Promise<T>): Promise<T> => {
const configPath = process.env.OPENCLAW_CONFIG_PATH;
if (!configPath) {
throw new Error("Missing OPENCLAW_CONFIG_PATH");
}
let previousConfig: string | undefined;
try {
previousConfig = await fs.readFile(configPath, "utf-8");
} catch (err) {
const code = (err as NodeJS.ErrnoException | undefined)?.code;
if (code !== "ENOENT") {
throw err;
}
}
try {
await fs.mkdir(path.dirname(configPath), { recursive: true });
await fs.writeFile(configPath, JSON.stringify(config, null, 2), "utf-8");
clearConfigCache();
return await run();
} finally {
if (previousConfig === undefined) {
await fs.rm(configPath, { force: true });
} else {
await fs.writeFile(configPath, previousConfig, "utf-8");
}
clearConfigCache();
}
};
const withTempHome = async <T>(fn: (homeDir: string) => Promise<T>): Promise<T> => {
const tempHome = await createTempHomeEnv("openclaw-home-");
try {
@@ -180,171 +279,75 @@ describe("gateway server models + voicewake", () => {
});
test("models.list returns model catalog", async () => {
piSdkMock.enabled = true;
piSdkMock.models = [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
seedPiCatalog();
const res1 = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
const res2 = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
const res1 = await listModels();
const res2 = await listModels();
expect(res1.ok).toBe(true);
expect(res2.ok).toBe(true);
const models = res1.payload?.models ?? [];
expect(models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
expect(models).toEqual(expectedSortedCatalog());
expect(piSdkMock.discoverCalls).toBe(1);
});
test("models.list filters to allowlisted configured models by default", async () => {
const configPath = process.env.OPENCLAW_CONFIG_PATH;
if (!configPath) {
throw new Error("Missing OPENCLAW_CONFIG_PATH");
}
let previousConfig: string | undefined;
try {
previousConfig = await fs.readFile(configPath, "utf-8");
} catch (err) {
const code = (err as NodeJS.ErrnoException | undefined)?.code;
if (code !== "ENOENT") {
throw err;
}
}
try {
await fs.mkdir(path.dirname(configPath), { recursive: true });
await fs.writeFile(
configPath,
JSON.stringify(
{
agents: {
defaults: {
model: { primary: "openai/gpt-test-z" },
models: {
"openai/gpt-test-z": {},
"anthropic/claude-test-a": {},
},
},
await withModelsConfig(
{
agents: {
defaults: {
model: { primary: "openai/gpt-test-z" },
models: {
"openai/gpt-test-z": {},
"anthropic/claude-test-a": {},
},
},
null,
2,
),
"utf-8",
);
clearConfigCache();
},
},
async () => {
seedPiCatalog();
const res = await listModels();
piSdkMock.enabled = true;
piSdkMock.models = [
{ id: "gpt-test-z", provider: "openai", contextWindow: 0 },
{
id: "gpt-test-a",
name: "A-Model",
provider: "openai",
contextWindow: 8000,
},
{
id: "claude-test-b",
name: "B-Model",
provider: "anthropic",
contextWindow: 1000,
},
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
];
expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
},
);
});
const res = await rpcReq<{
models: Array<{
id: string;
name: string;
provider: string;
contextWindow?: number;
}>;
}>(ws, "models.list");
test("models.list falls back to full catalog when allowlist has no catalog match", async () => {
await withModelsConfig(
{
agents: {
defaults: {
model: { primary: "openai/not-in-catalog" },
models: {
"openai/not-in-catalog": {},
},
},
},
},
async () => {
seedPiCatalog();
const res = await listModels();
expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual([
{
id: "claude-test-a",
name: "A-Model",
provider: "anthropic",
contextWindow: 200_000,
},
{
id: "gpt-test-z",
name: "gpt-test-z",
provider: "openai",
},
]);
} finally {
if (previousConfig === undefined) {
await fs.rm(configPath, { force: true });
} else {
await fs.writeFile(configPath, previousConfig, "utf-8");
}
clearConfigCache();
}
expect(res.ok).toBe(true);
expect(res.payload?.models).toEqual(expectedSortedCatalog());
},
);
});
test("models.list rejects unknown params", async () => {

View File

@@ -430,7 +430,7 @@ export function resolveSessionStoreKey(params: {
cfg: OpenClawConfig;
sessionKey: string;
}): string {
const raw = params.sessionKey.trim();
const raw = (params.sessionKey ?? "").trim();
if (!raw) {
return raw;
}