test(agents): share assistant error message test fixture

This commit is contained in:
Peter Steinberger
2026-02-19 08:53:50 +00:00
parent f57ba32f88
commit b41fd20741
3 changed files with 47 additions and 49 deletions

View File

@@ -6,32 +6,14 @@ import {
formatAssistantErrorText, formatAssistantErrorText,
formatRawAssistantErrorForUi, formatRawAssistantErrorForUi,
} from "./pi-embedded-helpers.js"; } from "./pi-embedded-helpers.js";
import { makeAssistantMessageFixture } from "./test-helpers/assistant-message-fixtures.js";
describe("formatAssistantErrorText", () => { describe("formatAssistantErrorText", () => {
const makeAssistantError = (errorMessage: string): AssistantMessage => ({ const makeAssistantError = (errorMessage: string): AssistantMessage =>
role: "assistant", makeAssistantMessageFixture({
api: "openai-responses", errorMessage,
provider: "openai", content: [{ type: "text", text: errorMessage }],
model: "test-model", });
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
},
stopReason: "error",
errorMessage,
content: [{ type: "text", text: errorMessage }],
timestamp: 0,
});
it("returns a friendly message for context overflow", () => { it("returns a friendly message for context overflow", () => {
const msg = makeAssistantError("request_too_large"); const msg = makeAssistantError("request_too_large");

View File

@@ -1,6 +1,7 @@
import type { AssistantMessage } from "@mariozechner/pi-ai"; import type { AssistantMessage } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "vitest";
import { formatBillingErrorMessage } from "../../pi-embedded-helpers.js"; import { formatBillingErrorMessage } from "../../pi-embedded-helpers.js";
import { makeAssistantMessageFixture } from "../../test-helpers/assistant-message-fixtures.js";
import { buildEmbeddedRunPayloads } from "./payloads.js"; import { buildEmbeddedRunPayloads } from "./payloads.js";
describe("buildEmbeddedRunPayloads", () => { describe("buildEmbeddedRunPayloads", () => {
@@ -15,31 +16,12 @@ describe("buildEmbeddedRunPayloads", () => {
}, },
"request_id": "req_011CX7DwS7tSvggaNHmefwWg" "request_id": "req_011CX7DwS7tSvggaNHmefwWg"
}`; }`;
const makeAssistant = (overrides: Partial<AssistantMessage>): AssistantMessage => ({ const makeAssistant = (overrides: Partial<AssistantMessage>): AssistantMessage =>
role: "assistant", makeAssistantMessageFixture({
api: "openai-responses", errorMessage: errorJson,
provider: "openai", content: [{ type: "text", text: errorJson }],
model: "test-model", ...overrides,
usage: { });
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
total: 0,
},
},
timestamp: 0,
stopReason: "error",
errorMessage: errorJson,
content: [{ type: "text", text: errorJson }],
...overrides,
});
type BuildPayloadParams = Parameters<typeof buildEmbeddedRunPayloads>[0]; type BuildPayloadParams = Parameters<typeof buildEmbeddedRunPayloads>[0];
const buildPayloads = (overrides: Partial<BuildPayloadParams> = {}) => const buildPayloads = (overrides: Partial<BuildPayloadParams> = {}) =>

View File

@@ -0,0 +1,34 @@
import type { AssistantMessage } from "@mariozechner/pi-ai";
// Zero-valued token/cost usage template for fixtures. Kept private; the
// factory below deep-copies it so no two fixtures share a mutable object.
const ZERO_USAGE: AssistantMessage["usage"] = {
  input: 0,
  output: 0,
  cacheRead: 0,
  cacheWrite: 0,
  totalTokens: 0,
  cost: {
    input: 0,
    output: 0,
    cacheRead: 0,
    cacheWrite: 0,
    total: 0,
  },
};

/**
 * Builds a minimal assistant message for tests.
 *
 * Defaults to an errored "openai-responses" message whose text content echoes
 * the error message ("error" when none is given).
 *
 * @param overrides - Partial message fields merged over the defaults; an
 *   override wins over every default, including `content` and `usage`.
 * @returns A fresh `AssistantMessage`. The default `usage` is deep-copied per
 *   call so a test mutating it cannot leak state into other fixtures.
 */
export function makeAssistantMessageFixture(
  overrides: Partial<AssistantMessage> = {},
): AssistantMessage {
  // Keep the default content in sync with the overridden errorMessage, so a
  // caller passing only `errorMessage` gets matching text content.
  const errorText = typeof overrides.errorMessage === "string" ? overrides.errorMessage : "error";
  return {
    role: "assistant",
    api: "openai-responses",
    provider: "openai",
    model: "test-model",
    // Deep copy (usage and nested cost) so fixtures never alias ZERO_USAGE.
    usage: { ...ZERO_USAGE, cost: { ...ZERO_USAGE.cost } },
    timestamp: 0,
    stopReason: "error",
    errorMessage: errorText,
    content: [{ type: "text", text: errorText }],
    ...overrides,
  };
}