fix(telegram): prevent streaming with extended-thinking models from overwriting previous messages; the same bug also affects Execution error output (#17973)

Merged via /review-pr -> /prepare-pr -> /merge-pr.

Prepared head SHA: 34b52eead8
Co-authored-by: Marvae <11957602+Marvae@users.noreply.github.com>
Co-authored-by: obviyus <22031114+obviyus@users.noreply.github.com>
Reviewed-by: @obviyus
This commit is contained in:
Hongwei Ma
2026-02-16 21:24:34 +08:00
committed by GitHub
parent 553d17f8af
commit dddb1bc942
14 changed files with 260 additions and 134 deletions

View File

@@ -471,6 +471,7 @@ export async function runEmbeddedPiAgent(
blockReplyBreak: params.blockReplyBreak,
blockReplyChunking: params.blockReplyChunking,
onReasoningStream: params.onReasoningStream,
onReasoningEnd: params.onReasoningEnd,
onToolResult: params.onToolResult,
onAgentEvent: params.onAgentEvent,
extraSystemPrompt: params.extraSystemPrompt,

View File

@@ -737,6 +737,7 @@ export async function runEmbeddedAttempt(
shouldEmitToolOutput: params.shouldEmitToolOutput,
onToolResult: params.onToolResult,
onReasoningStream: params.onReasoningStream,
onReasoningEnd: params.onReasoningEnd,
onBlockReply: params.onBlockReply,
onBlockReplyFlush: params.onBlockReplyFlush,
blockReplyBreak: params.blockReplyBreak,

View File

@@ -95,6 +95,7 @@ export type RunEmbeddedPiAgentParams = {
blockReplyBreak?: "text_end" | "message_end";
blockReplyChunking?: BlockReplyChunking;
onReasoningStream?: (payload: { text?: string; mediaUrls?: string[] }) => void | Promise<void>;
onReasoningEnd?: () => void | Promise<void>;
onToolResult?: (payload: { text?: string; mediaUrls?: string[] }) => void | Promise<void>;
onAgentEvent?: (evt: { stream: string; data: Record<string, unknown> }) => void;
lane?: string;

View File

@@ -184,6 +184,29 @@ describe("buildEmbeddedRunPayloads", () => {
expect(payloads[0]?.text).toContain("code 1");
});
it("does not add tool error fallback when assistant text exists after tool calls", () => {
  // A single tool-call content block on the last assistant message.
  const toolCallBlock = {
    type: "toolCall",
    id: "toolu_01",
    name: "browser",
    arguments: { action: "search", query: "openclaw docs" },
  };
  // Assistant stopped for tool use but later produced user-facing text,
  // so the tool error must not surface as a fallback payload.
  const lastAssistant = makeAssistant({
    stopReason: "toolUse",
    errorMessage: undefined,
    content: [toolCallBlock],
  });
  const payloads = buildPayloads({
    assistantTexts: ["Checked the page and recovered with final answer."],
    lastAssistant,
    lastToolError: { toolName: "browser", error: "connection timeout" },
  });
  const [onlyPayload] = payloads;
  expect(payloads).toHaveLength(1);
  expect(onlyPayload?.isError).toBeUndefined();
  expect(onlyPayload?.text).toContain("recovered");
});
it("suppresses recoverable tool errors containing 'required' for non-mutating tools", () => {
const payloads = buildPayloads({
lastToolError: { toolName: "browser", error: "url required" },

View File

@@ -218,6 +218,7 @@ export function buildEmbeddedRunPayloads(params: {
: []
).filter((text) => !shouldSuppressRawErrorText(text));
let hasUserFacingAssistantReply = false;
for (const text of answerTexts) {
const {
text: cleanedText,
@@ -238,22 +239,13 @@ export function buildEmbeddedRunPayloads(params: {
replyToTag,
replyToCurrent,
});
hasUserFacingAssistantReply = true;
}
if (params.lastToolError) {
const lastAssistantHasToolCalls =
Array.isArray(params.lastAssistant?.content) &&
params.lastAssistant?.content.some((block) =>
block && typeof block === "object"
? (block as { type?: unknown }).type === "toolCall"
: false,
);
const lastAssistantWasToolUse = params.lastAssistant?.stopReason === "toolUse";
const hasUserFacingReply =
replyItems.length > 0 && !lastAssistantHasToolCalls && !lastAssistantWasToolUse;
const shouldShowToolError = shouldShowToolErrorWarning({
lastToolError: params.lastToolError,
hasUserFacingReply,
hasUserFacingReply: hasUserFacingAssistantReply,
suppressToolErrors: Boolean(params.config?.messages?.suppressToolErrors),
});

View File

@@ -140,7 +140,12 @@ export function handleMessageUpdate(
})
.trim();
if (next) {
const wasThinking = ctx.state.partialBlockState.thinking;
const visibleDelta = chunk ? ctx.stripBlockTags(chunk, ctx.state.partialBlockState) : "";
// Detect when thinking block ends (</think> tag processed)
if (wasThinking && !ctx.state.partialBlockState.thinking) {
void ctx.params.onReasoningEnd?.();
}
const parsedDelta = visibleDelta ? ctx.consumePartialReplyDirectives(visibleDelta) : null;
const parsedFull = parseReplyDirectives(stripTrailingDirective(next));
const cleanedText = parsedFull.text;

View File

@@ -17,6 +17,8 @@ export type SubscribeEmbeddedPiSessionParams = {
shouldEmitToolOutput?: () => boolean;
onToolResult?: (payload: { text?: string; mediaUrls?: string[] }) => void | Promise<void>;
onReasoningStream?: (payload: { text?: string; mediaUrls?: string[] }) => void | Promise<void>;
/** Called when a thinking/reasoning block ends (</think> tag processed). */
onReasoningEnd?: () => void | Promise<void>;
onBlockReply?: (payload: {
text?: string;
mediaUrls?: string[];