fix: harden flaky tests and cover native google thought signatures (#23457) (thanks @echoVic)

This commit is contained in:
Peter Steinberger
2026-02-22 12:22:38 +01:00
parent 9176571ec1
commit 401106b963
9 changed files with 110 additions and 13 deletions

View File

@@ -231,6 +231,72 @@ describe("sanitizeSessionHistory (google thinking)", () => {
]);
});
it("strips non-base64 thought signatures for native Google Gemini", async () => {
const sessionManager = SessionManager.inMemory();
const input = [
{
role: "user",
content: "hi",
},
{
role: "assistant",
content: [
{ type: "text", text: "hello", thought_signature: "msg_abc123" },
{ type: "thinking", thinking: "ok", thought_signature: "c2ln" },
{
type: "toolCall",
id: "call_1",
name: "read",
arguments: { path: "/tmp/foo" },
thoughtSignature: '{"id":1}',
},
{
type: "toolCall",
id: "call_2",
name: "read",
arguments: { path: "/tmp/bar" },
thoughtSignature: "c2ln",
},
],
},
] as unknown as AgentMessage[];
const out = await sanitizeSessionHistory({
messages: input,
modelApi: "google-generative-ai",
provider: "google",
modelId: "gemini-2.0-flash",
sessionManager,
sessionId: "session:google-gemini",
});
const assistant = out.find((msg) => (msg as { role?: string }).role === "assistant") as {
content?: Array<{
type?: string;
thought_signature?: string;
thoughtSignature?: string;
thinking?: string;
}>;
};
expect(assistant.content).toEqual([
{ type: "text", text: "hello" },
{ type: "thinking", thinking: "ok", thought_signature: "c2ln" },
{
type: "toolCall",
id: "call1",
name: "read",
arguments: { path: "/tmp/foo" },
},
{
type: "toolCall",
id: "call2",
name: "read",
arguments: { path: "/tmp/bar" },
thoughtSignature: "c2ln",
},
]);
});
it("keeps mixed signed/unsigned thinking blocks for Google models", async () => {
const sessionManager = SessionManager.inMemory();
const input = [

View File

@@ -130,7 +130,7 @@ beforeAll(async () => {
workspaceDir = path.join(tempRoot, "workspace");
await fs.mkdir(agentDir, { recursive: true });
await fs.mkdir(workspaceDir, { recursive: true });
}, 60_000);
}, 180_000);
afterAll(async () => {
if (!tempRoot) {

View File

@@ -1,10 +1,11 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import "./test-helpers/fast-core-tools.js";
import {
getCallGatewayMock,
getSessionsSpawnTool,
setSessionsSpawnConfigOverride,
} from "./openclaw-tools.subagents.sessions-spawn.test-harness.js";
import { resetSubagentRegistryForTests } from "./subagent-registry.js";
const hookRunnerMocks = vi.hoisted(() => ({
hasSubagentEndedHook: true,
@@ -79,6 +80,7 @@ function mockAgentStartFailure() {
describe("sessions_spawn subagent lifecycle hooks", () => {
beforeEach(() => {
resetSubagentRegistryForTests();
hookRunnerMocks.hasSubagentEndedHook = true;
hookRunnerMocks.runSubagentSpawning.mockClear();
hookRunnerMocks.runSubagentSpawned.mockClear();
@@ -103,6 +105,10 @@ describe("sessions_spawn subagent lifecycle hooks", () => {
});
});
// Reset the shared subagent registry after every test as well as before
// (see beforeEach) so registry state cannot leak between tests even when a
// test fails mid-way.
afterEach(() => {
resetSubagentRegistryForTests();
});
it("runs subagent_spawning and emits subagent_spawned with requester metadata", async () => {
const tool = await getSessionsSpawnTool({
agentSessionKey: "main",

View File

@@ -19,6 +19,10 @@ describe("resolveTranscriptPolicy", () => {
modelApi: "google-generative-ai",
});
expect(policy.sanitizeToolCallIds).toBe(true);
expect(policy.sanitizeThoughtSignatures).toEqual({
allowBase64Only: true,
includeCamelCase: true,
});
});
it("enables sanitizeToolCallIds for Mistral provider", () => {