Files
openclaw/src/agents/subagent-registry.lifecycle-retry-grace.test.ts
Onur Solmaz a7d56e3554 feat: ACP thread-bound agents (#23580)
* docs: add ACP thread-bound agents plan doc

* docs: expand ACP implementation specification

* feat(acp): route ACP sessions through core dispatch and lifecycle cleanup

* feat(acp): add /acp commands and Discord spawn gate

* ACP: add acpx runtime plugin backend

* fix(subagents): defer transient lifecycle errors before announce

* Agents: harden ACP sessions_spawn and tighten spawn guidance

* Agents: require explicit ACP target for runtime spawns

* docs: expand ACP control-plane implementation plan

* ACP: harden metadata seeding and spawn guidance

* ACP: centralize runtime control-plane manager and fail-closed dispatch

* ACP: harden runtime manager and unify spawn helpers

* Commands: route ACP sessions through ACP runtime in agent command

* ACP: require persisted metadata for runtime spawns

* Sessions: preserve ACP metadata when updating entries

* Plugins: harden ACP backend registry across loaders

* ACPX: make availability probe compatible with adapters

* E2E: add manual Discord ACP plain-language smoke script

* ACPX: preserve streamed spacing across Discord delivery

* Docs: add ACP Discord streaming strategy

* ACP: harden Discord stream buffering for thread replies

* ACP: reuse shared block reply pipeline for projector

* ACP: unify streaming config and adopt coalesceIdleMs

* Docs: add temporary ACP production hardening plan

* Docs: trim temporary ACP hardening plan goals

* Docs: gate ACP thread controls by backend capabilities

* ACP: add capability-gated runtime controls and /acp operator commands

* Docs: remove temporary ACP hardening plan

* ACP: fix spawn target validation and close cache cleanup

* ACP: harden runtime dispatch and recovery paths

* ACP: split ACP command/runtime internals and centralize policy

* ACP: harden runtime lifecycle, validation, and observability

* ACP: surface runtime and backend session IDs in thread bindings

* docs: add temp plan for binding-service migration

* ACP: migrate thread binding flows to SessionBindingService

* ACP: address review feedback and preserve prompt wording

* ACPX plugin: pin runtime dependency and prefer bundled CLI

* Discord: complete binding-service migration cleanup and restore ACP plan

* Docs: add standalone ACP agents guide

* ACP: route harness intents to thread-bound ACP sessions

* ACP: fix spawn thread routing and queue-owner stall

* ACP: harden startup reconciliation and command bypass handling

* ACP: fix dispatch bypass type narrowing

* ACP: align runtime metadata to agentSessionId

* ACP: normalize session identifier handling and labels

* ACP: mark thread banner session ids provisional until first reply

* ACP: stabilize session identity mapping and startup reconciliation

* ACP: add resolved session-id notices and cwd in thread intros

* Discord: prefix thread meta notices consistently

* Discord: unify ACP/thread meta notices with gear prefix

* Discord: split thread persona naming from meta formatting

* Extensions: bump acpx plugin dependency to 0.1.9

* Agents: gate ACP prompt guidance behind acp.enabled

* Docs: remove temp experiment plan docs

* Docs: scope streaming plan to holy grail refactor

* Docs: refactor ACP agents guide for human-first flow

* Docs/Skill: add ACP feature-flag guidance and direct acpx telephone-game flow

* Docs/Skill: add OpenCode and Pi to ACP harness lists

* Docs/Skill: align ACP harness list with current acpx registry

* Dev/Test: move ACP plain-language smoke script and mark as keep

* Docs/Skill: reorder ACP harness lists with Pi first

* ACP: split control-plane manager into core/types/utils modules

* Docs: refresh ACP thread-bound agents plan

* ACP: extract dispatch lane and split manager domains

* ACP: centralize binding context and remove reverse deps

* Infra: unify system message formatting

* ACP: centralize error boundaries and session id rendering

* ACP: enforce init concurrency cap and strict meta clear

* Tests: fix ACP dispatch binding mock typing

* Tests: fix Discord thread-binding mock drift and ACP request id

* ACP: gate slash bypass and persist cleared overrides

* ACPX: await pre-abort cancel before runTurn return

* Extension: pin acpx runtime dependency to 0.1.11

* Docs: add pinned acpx install strategy for ACP extension

* Extensions/acpx: enforce strict local pinned startup

* Extensions/acpx: tighten acp-router install guidance

* ACPX: retry runtime test temp-dir cleanup

* Extensions/acpx: require proactive ACPX repair for thread spawns

* Extensions/acpx: require restart offer after acpx reinstall

* extensions/acpx: remove workspace protocol devDependency

* extensions/acpx: bump pinned acpx to 0.1.13

* extensions/acpx: sync lockfile after dependency bump

* ACPX: make runtime spawn Windows-safe

* fix: align doctor-config-flow repair tests with default-account migration (#23580) (thanks @osolmaz)
2026-02-26 11:00:09 +01:00

158 lines
4.4 KiB
TypeScript

import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
const noop = () => {};
// Shape of the lifecycle events emitted through the mocked agent-events bus.
type LifecycleEvent = {
  stream?: string;
  runId: string;
  data?: {
    phase?: string;
    startedAt?: number;
    endedAt?: number;
    aborted?: boolean;
    error?: string;
  };
};
// Captured by the agent-events mock below so tests can inject lifecycle events
// directly into the registry. Reset to undefined after each test.
let lifecycleHandler: ((evt: LifecycleEvent) => void) | undefined;
// Gateway mock: `agent.wait` never reports a terminal status over RPC, which
// forces the registry to fall back to lifecycle events — the path under test.
vi.mock("../gateway/call.js", () => ({
  callGateway: vi.fn(async (request: unknown) => {
    const { method } = request as { method?: string };
    // Keep wait unresolved from the RPC path so lifecycle fallback logic is exercised.
    return method === "agent.wait" ? { status: "pending" } : {};
  }),
}));
// Event-bus mock: capture the registered handler so each test can emit
// lifecycle events by hand; the returned unsubscribe function is a no-op.
vi.mock("../infra/agent-events.js", () => {
  return {
    onAgentEvent: vi.fn((handler: typeof lifecycleHandler) => {
      lifecycleHandler = handler;
      return noop;
    }),
  };
});
// Config mock: archiveAfterMinutes of 0 keeps archival timing from
// interfering with the grace-window timers these tests drive.
vi.mock("../config/config.js", () => {
  const makeConfig = () => ({
    agents: { defaults: { subagents: { archiveAfterMinutes: 0 } } },
  });
  return { loadConfig: vi.fn(makeConfig) };
});
// Spy on the announce flow so tests can assert whether — and with what
// outcome — a subagent completion gets announced.
const announceSpy = vi.fn(async () => true);
vi.mock("./subagent-announce.js", () => {
  return { runSubagentAnnounceFlow: announceSpy };
});
// No global hook runner participates in these tests.
vi.mock("../plugins/hook-runner-global.js", () => {
  return { getGlobalHookRunner: vi.fn(() => null) };
});
// Registry persistence mock: start from an empty map and never touch disk.
vi.mock("./subagent-registry.store.js", () => {
  return {
    loadSubagentRegistryFromDisk: vi.fn(() => new Map()),
    saveSubagentRegistryToDisk: vi.fn(() => {}),
  };
});
// Verifies the registry's grace window for lifecycle "error" events: a
// transient error followed by a retry ("start") must not be announced, while
// an error that stays terminal past the grace window is announced as failed.
// The timer advances (14_999 vs 15_000 ms) bracket what appears to be a ~15s
// grace window — confirm against subagent-registry.js if that constant moves.
describe("subagent registry lifecycle error grace", () => {
  let mod: typeof import("./subagent-registry.js");
  beforeAll(async () => {
    // Import after all vi.mock declarations so the registry binds to the mocks.
    mod = await import("./subagent-registry.js");
  });
  beforeEach(() => {
    vi.useFakeTimers();
  });
  afterEach(() => {
    announceSpy.mockClear();
    lifecycleHandler = undefined;
    mod.resetSubagentRegistryForTests({ persist: false });
    vi.useRealTimers();
  });
  // Drain queued microtasks so async continuations scheduled by the registry
  // (two chained awaits deep) settle before assertions run.
  const flushAsync = async () => {
    await Promise.resolve();
    await Promise.resolve();
  };
  // Single, shared extraction of the first announce call's outcome. The spy is
  // untyped, so one scoped cast is unavoidable; keeping it here avoids the
  // duplicated `as unknown as` double-cast previously repeated in every test.
  const firstAnnounceOutcome = (): { status?: string; error?: string } | undefined => {
    const calls = announceSpy.mock.calls as unknown as Array<Array<unknown>>;
    const first = (calls[0]?.[0] ?? {}) as {
      outcome?: { status?: string; error?: string };
    };
    return first.outcome;
  };
  it("ignores transient lifecycle errors when run retries and then ends successfully", async () => {
    mod.registerSubagentRun({
      runId: "run-transient-error",
      childSessionKey: "agent:main:subagent:transient-error",
      requesterSessionKey: "agent:main:main",
      requesterDisplayKey: "main",
      task: "transient error test",
      cleanup: "keep",
      expectsCompletionMessage: true,
    });
    // Transient error arrives first; announcement must be deferred.
    lifecycleHandler?.({
      stream: "lifecycle",
      runId: "run-transient-error",
      data: { phase: "error", error: "rate limit", endedAt: 1_000 },
    });
    await flushAsync();
    expect(announceSpy).not.toHaveBeenCalled();
    // Just inside the grace window: still silent.
    await vi.advanceTimersByTimeAsync(14_999);
    expect(announceSpy).not.toHaveBeenCalled();
    // Retry begins before the window elapses — the pending error is dropped.
    lifecycleHandler?.({
      stream: "lifecycle",
      runId: "run-transient-error",
      data: { phase: "start", startedAt: 1_050 },
    });
    await flushAsync();
    await vi.advanceTimersByTimeAsync(20_000);
    expect(announceSpy).not.toHaveBeenCalled();
    // Successful end announces exactly once, with an "ok" outcome.
    lifecycleHandler?.({
      stream: "lifecycle",
      runId: "run-transient-error",
      data: { phase: "end", endedAt: 1_250 },
    });
    await flushAsync();
    expect(announceSpy).toHaveBeenCalledTimes(1);
    expect(firstAnnounceOutcome()?.status).toBe("ok");
  });
  it("announces error when lifecycle error remains terminal after grace window", async () => {
    mod.registerSubagentRun({
      runId: "run-terminal-error",
      childSessionKey: "agent:main:subagent:terminal-error",
      requesterSessionKey: "agent:main:main",
      requesterDisplayKey: "main",
      task: "terminal error test",
      cleanup: "keep",
      expectsCompletionMessage: true,
    });
    lifecycleHandler?.({
      stream: "lifecycle",
      runId: "run-terminal-error",
      data: { phase: "error", error: "fatal failure", endedAt: 2_000 },
    });
    await flushAsync();
    expect(announceSpy).not.toHaveBeenCalled();
    // No retry arrives; the full grace window elapsing flushes the error.
    await vi.advanceTimersByTimeAsync(15_000);
    await flushAsync();
    expect(announceSpy).toHaveBeenCalledTimes(1);
    const outcome = firstAnnounceOutcome();
    expect(outcome?.status).toBe("error");
    expect(outcome?.error).toBe("fatal failure");
  });
});