test(cron): dedupe migration and regression fixtures

This commit is contained in:
Peter Steinberger
2026-02-18 12:20:48 +00:00
parent 2fd211b705
commit eabf187fa5
2 changed files with 139 additions and 153 deletions

View File

@@ -76,6 +76,33 @@ function createDefaultIsolatedRunner(): CronServiceOptions["runIsolatedAgentJob"
}) as CronServiceOptions["runIsolatedAgentJob"];
}
function createIsolatedRegressionJob(params: {
  id: string;
  name: string;
  scheduledAt: number;
  schedule: CronJob["schedule"];
  payload: CronJob["payload"];
  state?: CronJobState;
}): CronJob {
  // Fixture builder for isolated-session cron regression jobs: fills in the
  // invariants shared by the #17821 regression tests (enabled, isolated target,
  // next-heartbeat wake, announce delivery) and backdates created/updated
  // timestamps one day before the scheduled time.
  const oneDayMs = 86_400_000;
  const backdatedMs = params.scheduledAt - oneDayMs;
  return {
    id: params.id,
    name: params.name,
    enabled: true,
    createdAtMs: backdatedMs,
    updatedAtMs: backdatedMs,
    schedule: params.schedule,
    sessionTarget: "isolated",
    wakeMode: "next-heartbeat",
    payload: params.payload,
    delivery: { mode: "announce" },
    state: params.state ?? {},
  };
}
// Persists the given jobs as a version-1 cron store file (pretty-printed JSON).
async function writeCronJobs(storePath: string, jobs: CronJob[]) {
  const serialized = JSON.stringify({ version: 1, jobs }, null, 2);
  await fs.writeFile(storePath, serialized, "utf-8");
}
async function startCronForStore(params: {
storePath: string;
cronEnabled?: boolean;
@@ -531,24 +558,15 @@ describe("Cron issue regressions", () => {
const scheduledAt = Date.parse("2026-02-15T13:00:00.000Z");
const nextDay = scheduledAt + 86_400_000;
const cronJob: CronJob = {
const cronJob = createIsolatedRegressionJob({
id: "spin-loop-17821",
name: "daily noon",
enabled: true,
createdAtMs: scheduledAt - 86_400_000,
updatedAtMs: scheduledAt - 86_400_000,
scheduledAt,
schedule: { kind: "cron", expr: "0 13 * * *", tz: "UTC" },
sessionTarget: "isolated",
wakeMode: "next-heartbeat",
payload: { kind: "agentTurn", message: "briefing" },
delivery: { mode: "announce" },
state: { nextRunAtMs: scheduledAt },
};
await fs.writeFile(
store.storePath,
JSON.stringify({ version: 1, jobs: [cronJob] }, null, 2),
"utf-8",
);
});
await writeCronJobs(store.storePath, [cronJob]);
let now = scheduledAt;
let fireCount = 0;
@@ -591,24 +609,15 @@ describe("Cron issue regressions", () => {
const store = await makeStorePath();
const scheduledAt = Date.parse("2026-02-15T13:00:00.000Z");
const cronJob: CronJob = {
const cronJob = createIsolatedRegressionJob({
id: "spin-gap-17821",
name: "second-granularity",
enabled: true,
createdAtMs: scheduledAt - 86_400_000,
updatedAtMs: scheduledAt - 86_400_000,
scheduledAt,
schedule: { kind: "cron", expr: "* * * * * *", tz: "UTC" },
sessionTarget: "isolated",
wakeMode: "next-heartbeat",
payload: { kind: "agentTurn", message: "pulse" },
delivery: { mode: "announce" },
state: { nextRunAtMs: scheduledAt },
};
await fs.writeFile(
store.storePath,
JSON.stringify({ version: 1, jobs: [cronJob] }, null, 2),
"utf-8",
);
});
await writeCronJobs(store.storePath, [cronJob]);
let now = scheduledAt;
const state = createCronServiceState({
@@ -638,24 +647,15 @@ describe("Cron issue regressions", () => {
const store = await makeStorePath();
const scheduledAt = Date.parse("2026-02-15T13:00:00.000Z");
const cronJob: CronJob = {
const cronJob = createIsolatedRegressionJob({
id: "no-timeout-0",
name: "no-timeout",
enabled: true,
createdAtMs: scheduledAt - 86_400_000,
updatedAtMs: scheduledAt - 86_400_000,
scheduledAt,
schedule: { kind: "at", at: new Date(scheduledAt).toISOString() },
sessionTarget: "isolated",
wakeMode: "next-heartbeat",
payload: { kind: "agentTurn", message: "work", timeoutSeconds: 0 },
delivery: { mode: "announce" },
state: { nextRunAtMs: scheduledAt },
};
await fs.writeFile(
store.storePath,
JSON.stringify({ version: 1, jobs: [cronJob] }, null, 2),
"utf-8",
);
});
await writeCronJobs(store.storePath, [cronJob]);
let now = scheduledAt;
const deferredRun = createDeferred<{ status: "ok"; summary: string }>();
@@ -692,19 +692,13 @@ describe("Cron issue regressions", () => {
it("retries cron schedule computation from the next second when the first attempt returns undefined (#17821)", () => {
const scheduledAt = Date.parse("2026-02-15T13:00:00.000Z");
const cronJob: CronJob = {
const cronJob = createIsolatedRegressionJob({
id: "retry-next-second-17821",
name: "retry",
enabled: true,
createdAtMs: scheduledAt - 86_400_000,
updatedAtMs: scheduledAt - 86_400_000,
scheduledAt,
schedule: { kind: "cron", expr: "0 13 * * *", tz: "UTC" },
sessionTarget: "isolated",
wakeMode: "next-heartbeat",
payload: { kind: "agentTurn", message: "briefing" },
delivery: { mode: "announce" },
state: {},
};
});
const original = schedule.computeNextRunAtMs;
const spy = vi.spyOn(schedule, "computeNextRunAtMs");

View File

@@ -24,6 +24,11 @@ async function makeStorePath() {
};
}
// Creates the store's parent directory if needed, then writes a version-1
// store file containing exactly one legacy (pre-migration) job record.
async function writeLegacyStore(storePath: string, legacyJob: Record<string, unknown>) {
  const parentDir = path.dirname(storePath);
  await fs.mkdir(parentDir, { recursive: true });
  const body = JSON.stringify({ version: 1, jobs: [legacyJob] }, null, 2);
  await fs.writeFile(storePath, body);
}
async function migrateAndLoadFirstJob(storePath: string): Promise<Record<string, unknown>> {
const cron = new CronService({
storePath,
@@ -41,6 +46,37 @@ async function migrateAndLoadFirstJob(storePath: string): Promise<Record<string,
return loaded.jobs[0] as Record<string, unknown>;
}
// Baseline legacy (pre-migration) cron job record for migration tests.
// Fields in `overrides` replace the defaults on key collision.
function makeLegacyJob(overrides: Record<string, unknown>): Record<string, unknown> {
  const defaults: Record<string, unknown> = {
    id: "job-legacy",
    agentId: undefined,
    name: "Legacy job",
    description: null,
    enabled: true,
    deleteAfterRun: false,
    createdAtMs: 1_700_000_000_000,
    updatedAtMs: 1_700_000_000_000,
    sessionTarget: "main",
    wakeMode: "next-heartbeat",
    payload: { kind: "systemEvent", text: "tick" },
    state: {},
  };
  return { ...defaults, ...overrides };
}
// Round-trips one legacy job through the store migration: writes it into a
// fresh temp store, runs the migration, and returns the first migrated job.
// The temp store is cleaned up even when migration throws.
async function migrateLegacyJob(legacyJob: Record<string, unknown>) {
  const tempStore = await makeStorePath();
  try {
    await writeLegacyStore(tempStore.storePath, legacyJob);
    return await migrateAndLoadFirstJob(tempStore.storePath);
  } finally {
    await tempStore.cleanup();
  }
}
describe("cron store migration", () => {
beforeEach(() => {
noopLogger.debug.mockClear();
@@ -54,36 +90,24 @@ describe("cron store migration", () => {
});
it("migrates isolated jobs to announce delivery and drops isolation", async () => {
const store = await makeStorePath();
const atMs = 1_700_000_000_000;
const legacyJob = {
id: "job-1",
agentId: undefined,
sessionKey: " agent:main:discord:channel:ops ",
name: "Legacy job",
description: null,
enabled: true,
deleteAfterRun: false,
createdAtMs: 1_700_000_000_000,
updatedAtMs: 1_700_000_000_000,
schedule: { kind: "at", atMs },
sessionTarget: "isolated",
wakeMode: "next-heartbeat",
payload: {
kind: "agentTurn",
message: "hi",
deliver: true,
channel: "telegram",
to: "7200373102",
bestEffortDeliver: true,
},
isolation: { postToMainPrefix: "Cron" },
state: {},
};
await fs.mkdir(path.dirname(store.storePath), { recursive: true });
await fs.writeFile(store.storePath, JSON.stringify({ version: 1, jobs: [legacyJob] }, null, 2));
const migrated = await migrateAndLoadFirstJob(store.storePath);
const migrated = await migrateLegacyJob(
makeLegacyJob({
id: "job-1",
sessionKey: " agent:main:discord:channel:ops ",
schedule: { kind: "at", atMs },
sessionTarget: "isolated",
payload: {
kind: "agentTurn",
message: "hi",
deliver: true,
channel: "telegram",
to: "7200373102",
bestEffortDeliver: true,
},
isolation: { postToMainPrefix: "Cron" },
}),
);
expect(migrated.sessionKey).toBe("agent:main:discord:channel:ops");
expect(migrated.delivery).toEqual({
mode: "announce",
@@ -102,103 +126,71 @@ describe("cron store migration", () => {
const schedule = migrated.schedule as Record<string, unknown>;
expect(schedule.kind).toBe("at");
expect(schedule.at).toBe(new Date(atMs).toISOString());
await store.cleanup();
});
it("adds anchorMs to legacy every schedules", async () => {
const store = await makeStorePath();
const createdAtMs = 1_700_000_000_000;
const legacyJob = {
id: "job-every-legacy",
agentId: undefined,
name: "Legacy every",
description: null,
enabled: true,
deleteAfterRun: false,
createdAtMs,
updatedAtMs: createdAtMs,
schedule: { kind: "every", everyMs: 120_000 },
sessionTarget: "main",
wakeMode: "next-heartbeat",
payload: {
kind: "systemEvent",
text: "tick",
},
state: {},
};
await fs.mkdir(path.dirname(store.storePath), { recursive: true });
await fs.writeFile(store.storePath, JSON.stringify({ version: 1, jobs: [legacyJob] }, null, 2));
const migrated = await migrateAndLoadFirstJob(store.storePath);
const migrated = await migrateLegacyJob(
makeLegacyJob({
id: "job-every-legacy",
name: "Legacy every",
createdAtMs,
updatedAtMs: createdAtMs,
schedule: { kind: "every", everyMs: 120_000 },
}),
);
const schedule = migrated.schedule as Record<string, unknown>;
expect(schedule.kind).toBe("every");
expect(schedule.anchorMs).toBe(createdAtMs);
await store.cleanup();
});
it("adds default staggerMs to legacy recurring top-of-hour cron schedules", async () => {
const store = await makeStorePath();
const createdAtMs = 1_700_000_000_000;
const legacyJob = {
id: "job-cron-legacy",
agentId: undefined,
name: "Legacy cron",
description: null,
enabled: true,
deleteAfterRun: false,
createdAtMs,
updatedAtMs: createdAtMs,
schedule: { kind: "cron", expr: "0 */2 * * *", tz: "UTC" },
sessionTarget: "main",
wakeMode: "next-heartbeat",
payload: {
kind: "systemEvent",
text: "tick",
},
state: {},
};
await fs.mkdir(path.dirname(store.storePath), { recursive: true });
await fs.writeFile(store.storePath, JSON.stringify({ version: 1, jobs: [legacyJob] }, null, 2));
const migrated = await migrateAndLoadFirstJob(store.storePath);
const migrated = await migrateLegacyJob(
makeLegacyJob({
id: "job-cron-legacy",
name: "Legacy cron",
createdAtMs,
updatedAtMs: createdAtMs,
schedule: { kind: "cron", expr: "0 */2 * * *", tz: "UTC" },
}),
);
const schedule = migrated.schedule as Record<string, unknown>;
expect(schedule.kind).toBe("cron");
expect(schedule.staggerMs).toBe(DEFAULT_TOP_OF_HOUR_STAGGER_MS);
await store.cleanup();
});
it("adds default staggerMs to legacy 6-field top-of-hour cron schedules", async () => {
const store = await makeStorePath();
const createdAtMs = 1_700_000_000_000;
const legacyJob = {
id: "job-cron-seconds-legacy",
agentId: undefined,
name: "Legacy cron seconds",
description: null,
enabled: true,
deleteAfterRun: false,
createdAtMs,
updatedAtMs: createdAtMs,
schedule: { kind: "cron", expr: "0 0 */3 * * *", tz: "UTC" },
sessionTarget: "main",
wakeMode: "next-heartbeat",
payload: {
kind: "systemEvent",
text: "tick",
},
state: {},
};
await fs.mkdir(path.dirname(store.storePath), { recursive: true });
await fs.writeFile(store.storePath, JSON.stringify({ version: 1, jobs: [legacyJob] }, null, 2));
const migrated = await migrateAndLoadFirstJob(store.storePath);
const migrated = await migrateLegacyJob(
makeLegacyJob({
id: "job-cron-seconds-legacy",
name: "Legacy cron seconds",
createdAtMs,
updatedAtMs: createdAtMs,
schedule: { kind: "cron", expr: "0 0 */3 * * *", tz: "UTC" },
}),
);
const schedule = migrated.schedule as Record<string, unknown>;
expect(schedule.kind).toBe("cron");
expect(schedule.staggerMs).toBe(DEFAULT_TOP_OF_HOUR_STAGGER_MS);
});
await store.cleanup();
it("removes invalid legacy staggerMs from non top-of-hour cron schedules", async () => {
const migrated = await migrateLegacyJob(
makeLegacyJob({
id: "job-cron-minute-legacy",
name: "Legacy minute cron",
schedule: {
kind: "cron",
expr: "17 * * * *",
tz: "UTC",
staggerMs: "bogus",
},
}),
);
const schedule = migrated.schedule as Record<string, unknown>;
expect(schedule.kind).toBe("cron");
expect(schedule.staggerMs).toBeUndefined();
});
});