fix(agents): detect Kimi model-token-limit overflows

Co-authored-by: Danilo Falcão <danilo@falcao.org>
This commit is contained in:
Peter Steinberger
2026-02-23 05:15:55 +01:00
parent 3640484e28
commit 9bd04849ed
4 changed files with 19 additions and 0 deletions

View File

@@ -29,6 +29,12 @@ describe("formatAssistantErrorText", () => {
);
expect(formatAssistantErrorText(msg)).toContain("Context overflow");
});
it("returns context overflow for Kimi 'model token limit' errors", () => {
	// Kimi reports context overflow as a 400 with "exceeded model token limit";
	// it must be surfaced to the user as a "Context overflow" condition.
	const kimiOverflow = makeAssistantError(
		"error, status code: 400, message: Invalid request: Your request exceeded model token limit: 262144 (requested: 291351)",
	);
	const rendered = formatAssistantErrorText(kimiOverflow);
	expect(rendered).toContain("Context overflow");
});
it("returns a friendly message for Anthropic role ordering", () => {
const msg = makeAssistantError('messages: roles must alternate between "user" and "assistant"');
expect(formatAssistantErrorText(msg)).toContain("Message ordering conflict");