mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-08 12:51:24 +00:00
Agents: log lifecycle error text for embedded run failures
This commit is contained in:
@@ -22,6 +22,7 @@ Docs: https://docs.openclaw.ai
|
|||||||
- Gateway/Config reload: compare array-valued config paths structurally during diffing so unchanged `memory.qmd.paths` and `memory.qmd.scope.rules` no longer trigger false restart-required reloads. (#23185) Thanks @rex05ai.
||||||
- TUI/Input: enable multiline-paste burst coalescing on macOS Terminal.app and iTerm so pasted blocks no longer submit line-by-line as separate messages. (#18809) Thanks @fwends.
||||||
- Agents/Fallbacks: treat JSON payloads with `type: "api_error"` + `"Internal server error"` as transient failover errors so Anthropic 500-style failures trigger model fallback. (#23193) Thanks @jarvis-lane.
||||||
|
- Agents/Diagnostics: include resolved lifecycle error text in `embedded run agent end` warnings so UI/TUI “Connection error” runs expose actionable provider failure reasons in gateway logs. (#23054) Thanks @Raize.
|
||||||
- Gateway/Pairing: treat operator.admin pairing tokens as satisfying operator.write requests so legacy devices stop looping through scope-upgrade prompts introduced in 2026.2.19. (#23125, #23006) Thanks @vignesh07.
||||||
- Memory/QMD: add optional `memory.qmd.mcporter` search routing so QMD `query/search/vsearch` can run through mcporter keep-alive flows (including multi-collection paths) to reduce cold starts, while keeping searches on agent-scoped QMD state for consistent recall. (#19617) Thanks @nicole-luxe and @vignesh07.
||||||
- Chat/UI: strip inline reply/audio directive tags (`[[reply_to_current]]`, `[[reply_to:<id>]]`, `[[audio_as_voice]]`) from displayed chat history, live chat event output, and session preview snippets so control tags no longer leak into user-visible surfaces.
||||||
|
|||||||
76
src/agents/pi-embedded-subscribe.handlers.lifecycle.test.ts
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import { describe, expect, it, vi } from "vitest";
|
||||||
|
import { createInlineCodeState } from "../markdown/code-spans.js";
|
||||||
|
import { handleAgentEnd } from "./pi-embedded-subscribe.handlers.lifecycle.js";
|
||||||
|
import type { EmbeddedPiSubscribeContext } from "./pi-embedded-subscribe.handlers.types.js";
|
||||||
|
|
||||||
|
vi.mock("../infra/agent-events.js", () => ({
|
||||||
|
emitAgentEvent: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
function createContext(
|
||||||
|
lastAssistant: unknown,
|
||||||
|
overrides?: { onAgentEvent?: (event: unknown) => void },
|
||||||
|
): EmbeddedPiSubscribeContext {
|
||||||
|
return {
|
||||||
|
params: {
|
||||||
|
runId: "run-1",
|
||||||
|
config: {},
|
||||||
|
sessionKey: "agent:main:main",
|
||||||
|
onAgentEvent: overrides?.onAgentEvent,
|
||||||
|
},
|
||||||
|
state: {
|
||||||
|
lastAssistant: lastAssistant as EmbeddedPiSubscribeContext["state"]["lastAssistant"],
|
||||||
|
pendingCompactionRetry: 0,
|
||||||
|
blockState: {
|
||||||
|
thinking: true,
|
||||||
|
final: true,
|
||||||
|
inlineCode: createInlineCodeState(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
log: {
|
||||||
|
debug: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
},
|
||||||
|
flushBlockReplyBuffer: vi.fn(),
|
||||||
|
resolveCompactionRetry: vi.fn(),
|
||||||
|
maybeResolveCompactionWait: vi.fn(),
|
||||||
|
} as unknown as EmbeddedPiSubscribeContext;
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("handleAgentEnd", () => {
|
||||||
|
it("logs the resolved error message when run ends with assistant error", () => {
|
||||||
|
const onAgentEvent = vi.fn();
|
||||||
|
const ctx = createContext(
|
||||||
|
{
|
||||||
|
role: "assistant",
|
||||||
|
stopReason: "error",
|
||||||
|
errorMessage: "connection refused",
|
||||||
|
content: [{ type: "text", text: "" }],
|
||||||
|
},
|
||||||
|
{ onAgentEvent },
|
||||||
|
);
|
||||||
|
|
||||||
|
handleAgentEnd(ctx);
|
||||||
|
|
||||||
|
const warn = vi.mocked(ctx.log.warn);
|
||||||
|
expect(warn).toHaveBeenCalledTimes(1);
|
||||||
|
expect(warn.mock.calls[0]?.[0]).toContain("runId=run-1");
|
||||||
|
expect(warn.mock.calls[0]?.[0]).toContain("error=connection refused");
|
||||||
|
expect(onAgentEvent).toHaveBeenCalledWith({
|
||||||
|
stream: "lifecycle",
|
||||||
|
data: {
|
||||||
|
phase: "error",
|
||||||
|
error: "connection refused",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("keeps non-error run-end logging on debug only", () => {
|
||||||
|
const ctx = createContext(undefined);
|
||||||
|
|
||||||
|
handleAgentEnd(ctx);
|
||||||
|
|
||||||
|
expect(ctx.log.warn).not.toHaveBeenCalled();
|
||||||
|
expect(ctx.log.debug).toHaveBeenCalledWith("embedded run agent end: runId=run-1 isError=false");
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -29,8 +29,6 @@ export function handleAgentEnd(ctx: EmbeddedPiSubscribeContext) {
|
|||||||
const lastAssistant = ctx.state.lastAssistant;
|
const lastAssistant = ctx.state.lastAssistant;
|
||||||
const isError = isAssistantMessage(lastAssistant) && lastAssistant.stopReason === "error";
|
const isError = isAssistantMessage(lastAssistant) && lastAssistant.stopReason === "error";
|
||||||
|
|
||||||
ctx.log.debug(`embedded run agent end: runId=${ctx.params.runId} isError=${isError}`);
|
|
||||||
|
|
||||||
if (isError && lastAssistant) {
|
if (isError && lastAssistant) {
|
||||||
const friendlyError = formatAssistantErrorText(lastAssistant, {
|
const friendlyError = formatAssistantErrorText(lastAssistant, {
|
||||||
cfg: ctx.params.config,
|
cfg: ctx.params.config,
|
||||||
@@ -38,12 +36,16 @@ export function handleAgentEnd(ctx: EmbeddedPiSubscribeContext) {
|
|||||||
provider: lastAssistant.provider,
|
provider: lastAssistant.provider,
|
||||||
model: lastAssistant.model,
|
model: lastAssistant.model,
|
||||||
});
|
});
|
||||||
|
const errorText = (friendlyError || lastAssistant.errorMessage || "LLM request failed.").trim();
|
||||||
|
ctx.log.warn(
|
||||||
|
`embedded run agent end: runId=${ctx.params.runId} isError=true error=${errorText}`,
|
||||||
|
);
|
||||||
emitAgentEvent({
|
emitAgentEvent({
|
||||||
runId: ctx.params.runId,
|
runId: ctx.params.runId,
|
||||||
stream: "lifecycle",
|
stream: "lifecycle",
|
||||||
data: {
|
data: {
|
||||||
phase: "error",
|
phase: "error",
|
||||||
error: friendlyError || lastAssistant.errorMessage || "LLM request failed.",
|
error: errorText,
|
||||||
endedAt: Date.now(),
|
endedAt: Date.now(),
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
@@ -51,10 +53,11 @@ export function handleAgentEnd(ctx: EmbeddedPiSubscribeContext) {
|
|||||||
stream: "lifecycle",
|
stream: "lifecycle",
|
||||||
data: {
|
data: {
|
||||||
phase: "error",
|
phase: "error",
|
||||||
error: friendlyError || lastAssistant.errorMessage || "LLM request failed.",
|
error: errorText,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
|
ctx.log.debug(`embedded run agent end: runId=${ctx.params.runId} isError=${isError}`);
|
||||||
emitAgentEvent({
|
emitAgentEvent({
|
||||||
runId: ctx.params.runId,
|
runId: ctx.params.runId,
|
||||||
stream: "lifecycle",
|
stream: "lifecycle",
|
||||||
|
|||||||
Reference in New Issue
Block a user