fix: stabilize CI type and test harness coverage

This commit is contained in:
Peter Steinberger
2026-02-22 18:06:29 +00:00
parent af9881b9c5
commit b79c89fc90
9 changed files with 23 additions and 15 deletions

View File

@@ -96,6 +96,7 @@ export const MODELS_CONFIG_IMPLICIT_ENV_VARS = [
"OLLAMA_API_KEY",
"OPENCLAW_AGENT_DIR",
"OPENAI_API_KEY",
"OPENROUTER_API_KEY",
"PI_CODING_AGENT_DIR",
"QIANFAN_API_KEY",
"QWEN_OAUTH_TOKEN",

View File

@@ -1,7 +1,6 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import type { SessionManager as PiSessionManager } from "@mariozechner/pi-coding-agent";
import "./test-helpers/fast-coding-tools.js";
import { afterAll, beforeAll, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
@@ -116,7 +115,7 @@ vi.mock("@mariozechner/pi-ai", async () => {
});
let runEmbeddedPiAgent: typeof import("./pi-embedded-runner/run.js").runEmbeddedPiAgent;
let SessionManager: PiSessionManager;
let SessionManager: typeof import("@mariozechner/pi-coding-agent").SessionManager;
let tempRoot: string | undefined;
let agentDir: string;
let workspaceDir: string;

View File

@@ -1,6 +1,6 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model } from "@mariozechner/pi-ai";
import { AssistantMessageEventStream } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { applyExtraParamsToAgent } from "./extra-params.js";
@@ -14,7 +14,7 @@ describe("extra-params: OpenRouter Anthropic cache_control", () => {
};
const baseStreamFn: StreamFn = (_model, _context, options) => {
options?.onPayload?.(payload);
return new AssistantMessageEventStream();
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
@@ -49,7 +49,7 @@ describe("extra-params: OpenRouter Anthropic cache_control", () => {
};
const baseStreamFn: StreamFn = (_model, _context, options) => {
options?.onPayload?.(payload);
return new AssistantMessageEventStream();
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
@@ -79,7 +79,7 @@ describe("extra-params: OpenRouter Anthropic cache_control", () => {
};
const baseStreamFn: StreamFn = (_model, _context, options) => {
options?.onPayload?.(payload);
return new AssistantMessageEventStream();
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };

View File

@@ -54,8 +54,9 @@ const TAR_GZ_TRAVERSAL_BUFFER = Buffer.from(
);
/**
 * Stubs the SSRF-guarded fetch to resolve with an HTTP 200 response whose
 * body is the given archive bytes.
 *
 * The buffer is copied via `Uint8Array.from` so the Blob receives a plain
 * `Uint8Array` (presumably to satisfy the `BlobPart` typing — TODO confirm)
 * and later mutation of the caller's buffer cannot leak into the mocked
 * response.
 *
 * Fix: the object literal previously contained two `response:` properties
 * (stale diff residue keeping the un-copied `buffer` variant) — a duplicate
 * property is a TypeScript error; only the `blobPart` version is kept.
 */
function mockArchiveResponse(buffer: Uint8Array): void {
	const blobPart = Uint8Array.from(buffer);
	fetchWithSsrFGuardMock.mockResolvedValue({
		response: new Response(new Blob([blobPart]), { status: 200 }),
		// Guard handle release is a no-op in tests.
		release: async () => undefined,
	});
}

View File

@@ -530,6 +530,14 @@ function loadRequesterSessionEntry(requesterSessionKey: string) {
return { cfg, entry, canonicalKey };
}
/**
 * Derives the announce-queue key for a session.
 *
 * When the delivery origin carries a normalizable account id, the key is
 * scoped per account (`<sessionKey>:acct:<accountId>`); otherwise the bare
 * session key is used unchanged.
 */
function buildAnnounceQueueKey(sessionKey: string, origin?: DeliveryContext): string {
	const accountId = normalizeAccountId(origin?.accountId);
	// No account id → fall back to the unscoped session key.
	return accountId ? `${sessionKey}:acct:${accountId}` : sessionKey;
}
async function maybeQueueSubagentAnnounce(params: {
requesterSessionKey: string;
announceId?: string;
@@ -567,7 +575,7 @@ async function maybeQueueSubagentAnnounce(params: {
if (isActive && (shouldFollowup || queueSettings.mode === "steer")) {
const origin = resolveAnnounceOrigin(entry, params.requesterOrigin);
enqueueAnnounce({
key: canonicalKey,
key: buildAnnounceQueueKey(canonicalKey, origin),
item: {
announceId: params.announceId,
prompt: params.triggerMessage,