test(agents): dedupe agent and cron test scaffolds

author Peter Steinberger
date 2026-03-02 06:40:42 +00:00
parent 281494ae52
commit 7e29d604ba
38 changed files with 3114 additions and 4486 deletions


@@ -12,6 +12,21 @@ import {
   wrapStreamFnTrimToolCallNames,
 } from "./attempt.js";
 
+function createOllamaProviderConfig(injectNumCtxForOpenAICompat: boolean): OpenClawConfig {
+  return {
+    models: {
+      providers: {
+        ollama: {
+          baseUrl: "http://127.0.0.1:11434/v1",
+          api: "openai-completions",
+          injectNumCtxForOpenAICompat,
+          models: [],
+        },
+      },
+    },
+  };
+}
+
 describe("resolvePromptBuildHookResult", () => {
   function createLegacyOnlyHookRunner() {
     return {
@@ -129,6 +144,25 @@ describe("wrapStreamFnTrimToolCallNames", () => {
     };
   }
 
+  async function invokeWrappedStream(
+    baseFn: (...args: never[]) => unknown,
+    allowedToolNames?: Set<string>,
+  ) {
+    const wrappedFn = wrapStreamFnTrimToolCallNames(baseFn as never, allowedToolNames);
+    return await wrappedFn({} as never, {} as never, {} as never);
+  }
+
+  function createEventStream(params: {
+    event: unknown;
+    finalToolCall: { type: string; name: string };
+  }) {
+    const finalMessage = { role: "assistant", content: [params.finalToolCall] };
+    const baseFn = vi.fn(() =>
+      createFakeStream({ events: [params.event], resultMessage: finalMessage }),
+    );
+    return { baseFn, finalMessage };
+  }
+
   it("trims whitespace from live streamed tool call names and final result message", async () => {
     const partialToolCall = { type: "toolCall", name: " read " };
     const messageToolCall = { type: "toolCall", name: " exec " };
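
Note on the invokeWrappedStream helper above: the call sites it replaces (visible in the hunks below) cast wrappedFn(...) to Awaited<ReturnType<typeof wrappedFn>>, because wrapStreamFnTrimToolCallNames may hand back the stream either directly or wrapped in a promise. The helper drops the cast in favor of a plain await, which resolves a promise and passes a non-promise value through unchanged. A minimal standalone sketch of that principle (unwrap is a hypothetical name, not part of this commit):

async function unwrap<T>(value: T | Promise<T>): Promise<T> {
  // await resolves Promise<T> to T and forwards a plain T unchanged,
  // so no Awaited<ReturnType<...>> cast is needed at the call site.
  return await value;
}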
@@ -138,13 +172,9 @@ describe("wrapStreamFnTrimToolCallNames", () => {
       partial: { role: "assistant", content: [partialToolCall] },
       message: { role: "assistant", content: [messageToolCall] },
     };
-    const finalMessage = { role: "assistant", content: [finalToolCall] };
-    const baseFn = vi.fn(() => createFakeStream({ events: [event], resultMessage: finalMessage }));
-    const wrappedFn = wrapStreamFnTrimToolCallNames(baseFn as never);
-    const stream = wrappedFn({} as never, {} as never, {} as never) as Awaited<
-      ReturnType<typeof wrappedFn>
-    >;
+    const { baseFn, finalMessage } = createEventStream({ event, finalToolCall });
+    const stream = await invokeWrappedStream(baseFn);
 
     const seenEvents: unknown[] = [];
     for await (const item of stream) {
@@ -170,8 +200,7 @@ describe("wrapStreamFnTrimToolCallNames", () => {
       }),
     );
 
-    const wrappedFn = wrapStreamFnTrimToolCallNames(baseFn as never);
-    const stream = await wrappedFn({} as never, {} as never, {} as never);
+    const stream = await invokeWrappedStream(baseFn);
     const result = await stream.result();
 
     expect(finalToolCall.name).toBe("browser");
@@ -188,10 +217,7 @@ describe("wrapStreamFnTrimToolCallNames", () => {
       }),
     );
 
-    const wrappedFn = wrapStreamFnTrimToolCallNames(baseFn as never, new Set(["exec"]));
-    const stream = wrappedFn({} as never, {} as never, {} as never) as Awaited<
-      ReturnType<typeof wrappedFn>
-    >;
+    const stream = await invokeWrappedStream(baseFn, new Set(["exec"]));
     const result = await stream.result();
 
     expect(finalToolCall.name).toBe("exec");
@@ -205,13 +231,9 @@ describe("wrapStreamFnTrimToolCallNames", () => {
       type: "toolcall_delta",
       partial: { role: "assistant", content: [partialToolCall] },
     };
-    const finalMessage = { role: "assistant", content: [finalToolCall] };
-    const baseFn = vi.fn(() => createFakeStream({ events: [event], resultMessage: finalMessage }));
-    const wrappedFn = wrapStreamFnTrimToolCallNames(baseFn as never);
-    const stream = wrappedFn({} as never, {} as never, {} as never) as Awaited<
-      ReturnType<typeof wrappedFn>
-    >;
+    const { baseFn } = createEventStream({ event, finalToolCall });
+    const stream = await invokeWrappedStream(baseFn);
 
     for await (const _item of stream) {
       // drain
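
The createEventStream helper builds on createFakeStream, which comes from the suite's shared scaffolding and is not shown in this diff. Its contract can be inferred from usage: the tests drain the stream with for await and then call stream.result(), and the diff passes it events and resultMessage. A hypothetical reconstruction under those assumptions (the real implementation may differ):

function createFakeStream(params: { events: unknown[]; resultMessage: unknown }) {
  return {
    // The tests iterate the stream with `for await`, so it must be async-iterable.
    async *[Symbol.asyncIterator]() {
      for (const event of params.events) {
        yield event; // replay each recorded event in order
      }
    },
    // The tests also call stream.result(); here it resolves to the canned
    // final assistant message supplied by createEventStream.
    result: async () => params.resultMessage,
  };
}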
@@ -346,18 +368,7 @@ describe("resolveOllamaCompatNumCtxEnabled", () => {
   it("returns false when provider flag is explicitly disabled", () => {
     expect(
       resolveOllamaCompatNumCtxEnabled({
-        config: {
-          models: {
-            providers: {
-              ollama: {
-                baseUrl: "http://127.0.0.1:11434/v1",
-                api: "openai-completions",
-                injectNumCtxForOpenAICompat: false,
-                models: [],
-              },
-            },
-          },
-        },
+        config: createOllamaProviderConfig(false),
         providerId: "ollama",
       }),
     ).toBe(false);
@@ -385,18 +396,7 @@ describe("shouldInjectOllamaCompatNumCtx", () => {
         api: "openai-completions",
         baseUrl: "http://127.0.0.1:11434/v1",
       },
-      config: {
-        models: {
-          providers: {
-            ollama: {
-              baseUrl: "http://127.0.0.1:11434/v1",
-              api: "openai-completions",
-              injectNumCtxForOpenAICompat: false,
-              models: [],
-            },
-          },
-        },
-      },
+      config: createOllamaProviderConfig(false),
       providerId: "ollama",
     }),
   ).toBe(false);
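
With the twelve-line provider literal folded into createOllamaProviderConfig, both the resolveOllamaCompatNumCtxEnabled and shouldInjectOllamaCompatNumCtx call sites shrink to a single line, and parameterized variants become cheap. A hypothetical table-driven follow-up using vitest's it.each; the true case is an assumption, since this diff only exercises injectNumCtxForOpenAICompat: false:

it.each([
  { flag: false, expected: false },
  { flag: true, expected: true }, // assumed: the result mirrors the flag
])("mirrors injectNumCtxForOpenAICompat=$flag", ({ flag, expected }) => {
  expect(
    resolveOllamaCompatNumCtxEnabled({
      config: createOllamaProviderConfig(flag),
      providerId: "ollama",
    }),
  ).toBe(expected);
});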