perf(test): trim duplicate gateway and auto-reply test overhead

This commit is contained in:
Peter Steinberger
2026-02-13 23:40:25 +00:00
parent ad57e561c6
commit 5caf829d28
6 changed files with 34 additions and 234 deletions

View File

@@ -36,7 +36,7 @@ describe("gateway config reload during reply", () => {
const dispatcher = createReplyDispatcher({
deliver: async (payload) => {
// Simulate async reply delivery
await new Promise((resolve) => setTimeout(resolve, 100));
await new Promise((resolve) => setTimeout(resolve, 20));
deliveredReplies.push(payload.text ?? "");
},
onError: (err) => {
@@ -103,49 +103,4 @@ describe("gateway config reload during reply", () => {
expect(deliverCalled).toBe(false);
expect(getTotalPendingReplies()).toBe(0);
});
it("should integrate dispatcher reservation with concurrent dispatchers", async () => {
  const { createReplyDispatcher } = await import("../auto-reply/reply/reply-dispatcher.js");
  const { getTotalQueueSize } = await import("../process/command-queue.js");
  const received: string[] = [];
  const replyDispatcher = createReplyDispatcher({
    deliver: async (payload) => {
      await new Promise((resolve) => setTimeout(resolve, 50));
      received.push(payload.text ?? "");
    },
  });
  // Creating the dispatcher takes a reservation (pending=1).
  expect(getTotalPendingReplies()).toBe(1);
  // Active work is the sum of queued commands and pending replies.
  const activeCount = getTotalQueueSize() + getTotalPendingReplies();
  expect(activeCount).toBe(1); // 0 queued + 1 pending reservation
  // The command completes and enqueues its replies.
  replyDispatcher.sendFinalReply({ text: "Reply 1" });
  replyDispatcher.sendFinalReply({ text: "Reply 2" });
  // pending=3: the original reservation plus the two enqueued replies.
  expect(getTotalPendingReplies()).toBe(3);
  // markComplete() flags the reservation for release after the last delivery.
  replyDispatcher.markComplete();
  // The reservation is only cleared by the delivery .finally(); the key
  // invariant is that pending stays > 0 while deliveries are in flight.
  expect(getTotalPendingReplies()).toBeGreaterThan(0);
  // Let every queued reply drain.
  await replyDispatcher.waitForIdle();
  // Everything delivered: pending drops back to 0.
  expect(getTotalPendingReplies()).toBe(0);
  expect(received).toEqual(["Reply 1", "Reply 2"]);
  // Fully idle: no pending replies and an empty command queue.
  expect(getTotalPendingReplies()).toBe(0);
  expect(getTotalQueueSize()).toBe(0);
});
});

View File

@@ -31,7 +31,7 @@ describe("gateway restart deferral integration", () => {
const dispatcher = createReplyDispatcher({
deliver: async (payload) => {
// Simulate network delay
await new Promise((resolve) => setTimeout(resolve, 100));
await new Promise((resolve) => setTimeout(resolve, 20));
deliveredReplies.push({
text: payload.text ?? "",
timestamp: Date.now(),
@@ -116,84 +116,4 @@ describe("gateway restart deferral integration", () => {
"restart-can-proceed",
]);
});
it("should handle concurrent dispatchers with config changes", async () => {
  const { createReplyDispatcher } = await import("../auto-reply/reply/reply-dispatcher.js");
  const { getTotalPendingReplies } = await import("../auto-reply/reply/dispatcher-registry.js");
  // Two inbound messages are being handled at the same time.
  const received: string[] = [];
  // First message gets its own dispatcher.
  const first = createReplyDispatcher({
    deliver: async (payload) => {
      await new Promise((resolve) => setTimeout(resolve, 50));
      received.push(`msg1: ${payload.text}`);
    },
  });
  // Second message gets its own dispatcher.
  const second = createReplyDispatcher({
    deliver: async (payload) => {
      await new Promise((resolve) => setTimeout(resolve, 50));
      received.push(`msg2: ${payload.text}`);
    },
  });
  // Each live dispatcher holds one reservation.
  expect(getTotalPendingReplies()).toBe(2);
  // A config change arriving now must be deferred.
  const active = getTotalPendingReplies();
  expect(active).toBe(2); // 2 dispatcher reservations
  // Both messages finish processing and flush their replies.
  first.sendFinalReply({ text: "Reply from message 1" });
  first.markComplete();
  second.sendFinalReply({ text: "Reply from message 2" });
  second.markComplete();
  // Drain both dispatchers.
  await Promise.all([first.waitForIdle(), second.waitForIdle()]);
  // No reservations or replies remain.
  expect(getTotalPendingReplies()).toBe(0);
  // Both replies made it out.
  expect(received).toHaveLength(2);
});
it("should handle rapid config changes without losing replies", async () => {
  const { createReplyDispatcher } = await import("../auto-reply/reply/reply-dispatcher.js");
  const { getTotalPendingReplies } = await import("../auto-reply/reply/dispatcher-registry.js");
  const received: string[] = [];
  // A message arrives and a dispatcher is created for it.
  const slowDispatcher = createReplyDispatcher({
    deliver: async (payload) => {
      await new Promise((resolve) => setTimeout(resolve, 200)); // Slow network
      received.push(payload.text ?? "");
    },
  });
  // Several config changes fire in quick succession; every one of them
  // must be deferred while this dispatcher still has pending replies.
  slowDispatcher.sendFinalReply({ text: "Processing..." });
  slowDispatcher.sendFinalReply({ text: "Almost done..." });
  slowDispatcher.sendFinalReply({ text: "Complete!" });
  slowDispatcher.markComplete();
  // Let every queued reply drain.
  await slowDispatcher.waitForIdle();
  // Nothing was dropped despite the rapid config churn.
  expect(received).toEqual(["Processing...", "Almost done...", "Complete!"]);
  // With pending back at 0, the restart may finally proceed.
  expect(getTotalPendingReplies()).toBe(0);
});
});

View File

@@ -36,7 +36,7 @@ describe("real scenario: config change during message processing", () => {
throw new Error(error);
}
// Slow delivery — restart checks will run during this window
await new Promise((resolve) => setTimeout(resolve, 500));
await new Promise((resolve) => setTimeout(resolve, 150));
deliveredReplies.push(payload.text ?? "");
},
onError: () => {
@@ -59,7 +59,7 @@ describe("real scenario: config change during message processing", () => {
// If the tracking is broken, pending would be 0 and we'd restart.
let restartTriggered = false;
for (let i = 0; i < 3; i++) {
await new Promise((resolve) => setTimeout(resolve, 100));
await new Promise((resolve) => setTimeout(resolve, 25));
const pending = getTotalPendingReplies();
if (pending === 0) {
restartTriggered = true;
@@ -86,7 +86,7 @@ describe("real scenario: config change during message processing", () => {
const dispatcher = createReplyDispatcher({
deliver: async (_payload) => {
await new Promise((resolve) => setTimeout(resolve, 50));
await new Promise((resolve) => setTimeout(resolve, 10));
},
});
@@ -94,7 +94,7 @@ describe("real scenario: config change during message processing", () => {
expect(getTotalPendingReplies()).toBe(1);
// Simulate command processing delay BEFORE reply is enqueued
await new Promise((resolve) => setTimeout(resolve, 100));
await new Promise((resolve) => setTimeout(resolve, 20));
// During this delay, pending should STILL be 1 (reservation active)
expect(getTotalPendingReplies()).toBe(1);