Session/Cron maintenance hardening and cleanup UX (#24753)

Merged via /review-pr -> /prepare-pr -> /merge-pr.

Prepared head SHA: 7533b85156
Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com>
Co-authored-by: shakkernerd <165377636+shakkernerd@users.noreply.github.com>
Reviewed-by: @shakkernerd
This commit is contained in:
Gustavo Madeira Santana
2026-02-23 17:39:48 -05:00
committed by GitHub
parent 29b19455e3
commit eff3c5c707
49 changed files with 3180 additions and 235 deletions

View File

@@ -0,0 +1,38 @@
import { describe, expect, it } from "vitest";
import {
formatSessionArchiveTimestamp,
isPrimarySessionTranscriptFileName,
isSessionArchiveArtifactName,
parseSessionArchiveTimestamp,
} from "./artifacts.js";
describe("session artifact helpers", () => {
  it("classifies archived artifact file names", () => {
    // Every supported archive marker plus the legacy store backup form.
    const archiveNames = [
      "abc.jsonl.deleted.2026-01-01T00-00-00.000Z",
      "abc.jsonl.reset.2026-01-01T00-00-00.000Z",
      "abc.jsonl.bak.2026-01-01T00-00-00.000Z",
      "sessions.json.bak.1737420882",
    ];
    for (const name of archiveNames) {
      expect(isSessionArchiveArtifactName(name)).toBe(true);
    }
    // Marker-like substrings without a trailing timestamp are not archives.
    for (const name of ["keep.deleted.keep.jsonl", "abc.jsonl"]) {
      expect(isSessionArchiveArtifactName(name)).toBe(false);
    }
  });

  it("classifies primary transcript files", () => {
    expect(isPrimarySessionTranscriptFileName("abc.jsonl")).toBe(true);
    expect(isPrimarySessionTranscriptFileName("keep.deleted.keep.jsonl")).toBe(true);
    expect(isPrimarySessionTranscriptFileName("abc.jsonl.deleted.2026-01-01T00-00-00.000Z")).toBe(
      false,
    );
    expect(isPrimarySessionTranscriptFileName("sessions.json")).toBe(false);
  });

  it("formats and parses archive timestamps", () => {
    const nowMs = Date.parse("2026-02-23T12:34:56.000Z");
    const stamp = formatSessionArchiveTimestamp(nowMs);
    expect(stamp).toBe("2026-02-23T12-34-56.000Z");
    const archivedName = `abc.jsonl.deleted.${stamp}`;
    // Round-trip succeeds only for the matching reason.
    expect(parseSessionArchiveTimestamp(archivedName, "deleted")).toBe(nowMs);
    expect(parseSessionArchiveTimestamp(archivedName, "reset")).toBeNull();
    expect(parseSessionArchiveTimestamp("keep.deleted.keep.jsonl", "deleted")).toBeNull();
  });
});

View File

@@ -0,0 +1,67 @@
/** Why a transcript was archived; doubles as the file-name marker segment. */
export type SessionArchiveReason = "bak" | "reset" | "deleted";

// Timestamp suffix produced by formatSessionArchiveTimestamp:
// ISO-8601 instant with ":" replaced by "-" (milliseconds optional).
const ARCHIVE_TIMESTAMP_RE = /^\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}(?:\.\d{3})?Z$/;
// Legacy sessions.json backups: "sessions.json.bak.<digits>".
const LEGACY_STORE_BACKUP_RE = /^sessions\.json\.bak\.\d+$/;

// Checked in this order; matches the archive reasons emitted on cleanup.
const ARCHIVE_REASONS: readonly SessionArchiveReason[] = ["deleted", "reset", "bak"];

/** True when fileName ends with `.<reason>.<valid archive timestamp>`. */
function hasArchiveSuffix(fileName: string, reason: SessionArchiveReason): boolean {
  const marker = `.${reason}.`;
  const markerAt = fileName.lastIndexOf(marker);
  return markerAt >= 0 && ARCHIVE_TIMESTAMP_RE.test(fileName.slice(markerAt + marker.length));
}

/** True for archived transcript artifacts and legacy store backups. */
export function isSessionArchiveArtifactName(fileName: string): boolean {
  if (LEGACY_STORE_BACKUP_RE.test(fileName)) {
    return true;
  }
  return ARCHIVE_REASONS.some((reason) => hasArchiveSuffix(fileName, reason));
}

/** True for live `.jsonl` transcripts (never the store or an archive). */
export function isPrimarySessionTranscriptFileName(fileName: string): boolean {
  return (
    fileName !== "sessions.json" &&
    fileName.endsWith(".jsonl") &&
    !isSessionArchiveArtifactName(fileName)
  );
}
/**
 * Format an epoch-ms instant as an ISO-8601 string with ":" swapped for "-"
 * so the result is safe to embed in file names on all platforms.
 */
export function formatSessionArchiveTimestamp(nowMs = Date.now()): string {
  const iso = new Date(nowMs).toISOString();
  return iso.replace(/:/g, "-");
}
function restoreSessionArchiveTimestamp(raw: string): string {
const [datePart, timePart] = raw.split("T");
if (!datePart || !timePart) {
return raw;
}
return `${datePart}T${timePart.replace(/-/g, ":")}`;
}
/**
 * Extract the archive instant (epoch ms) from a file name for the given
 * reason, or null when the name lacks a valid `.<reason>.<timestamp>` suffix.
 */
export function parseSessionArchiveTimestamp(
  fileName: string,
  reason: SessionArchiveReason,
): number | null {
  const marker = `.${reason}.`;
  const markerAt = fileName.lastIndexOf(marker);
  if (markerAt < 0) {
    return null;
  }
  const suffix = fileName.slice(markerAt + marker.length);
  if (!suffix || !ARCHIVE_TIMESTAMP_RE.test(suffix)) {
    return null;
  }
  const parsedMs = Date.parse(restoreSessionArchiveTimestamp(suffix));
  return Number.isNaN(parsedMs) ? null : parsedMs;
}

View File

@@ -0,0 +1,95 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { formatSessionArchiveTimestamp } from "./artifacts.js";
import { enforceSessionDiskBudget } from "./disk-budget.js";
import type { SessionEntry } from "./types.js";
// Temp dirs created during a test case; swept by the afterEach below.
const tempDirs: string[] = [];

/** Create a unique temp dir for one test case and register it for cleanup. */
async function createCaseDir(prefix: string): Promise<string> {
  const caseDir = await fs.mkdtemp(path.join(os.tmpdir(), prefix));
  tempDirs.push(caseDir);
  return caseDir;
}

afterEach(async () => {
  const pending = tempDirs.splice(0, tempDirs.length);
  await Promise.all(pending.map((dir) => fs.rm(dir, { recursive: true, force: true })));
});
describe("enforceSessionDiskBudget", () => {
  it("does not treat referenced transcripts with marker-like session IDs as archived artifacts", async () => {
    const caseDir = await createCaseDir("openclaw-disk-budget-");
    const storePath = path.join(caseDir, "sessions.json");
    // Session ID deliberately embeds ".deleted." so it resembles an archive marker.
    const sessionId = "keep.deleted.keep";
    const activeKey = "agent:main:main";
    const transcriptPath = path.join(caseDir, `${sessionId}.jsonl`);
    const store: Record<string, SessionEntry> = {
      [activeKey]: { sessionId, updatedAt: Date.now() },
    };
    await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
    await fs.writeFile(transcriptPath, "x".repeat(256), "utf-8");
    const result = await enforceSessionDiskBudget({
      store,
      storePath,
      activeSessionKey: activeKey,
      maintenance: { maxDiskBytes: 150, highWaterBytes: 100 },
      warnOnly: false,
    });
    // Even over budget, the referenced transcript must survive.
    await expect(fs.stat(transcriptPath)).resolves.toBeDefined();
    expect(result).toEqual(expect.objectContaining({ removedFiles: 0 }));
  });

  it("removes true archived transcript artifacts while preserving referenced primary transcripts", async () => {
    const caseDir = await createCaseDir("openclaw-disk-budget-");
    const storePath = path.join(caseDir, "sessions.json");
    const sessionId = "keep";
    const transcriptPath = path.join(caseDir, `${sessionId}.jsonl`);
    const dayAgoMs = Date.now() - 24 * 60 * 60 * 1000;
    const archivePath = path.join(
      caseDir,
      `old-session.jsonl.deleted.${formatSessionArchiveTimestamp(dayAgoMs)}`,
    );
    const store: Record<string, SessionEntry> = {
      "agent:main:main": { sessionId, updatedAt: Date.now() },
    };
    await fs.writeFile(storePath, JSON.stringify(store, null, 2), "utf-8");
    await fs.writeFile(transcriptPath, "k".repeat(80), "utf-8");
    await fs.writeFile(archivePath, "a".repeat(260), "utf-8");
    const result = await enforceSessionDiskBudget({
      store,
      storePath,
      maintenance: { maxDiskBytes: 300, highWaterBytes: 220 },
      warnOnly: false,
    });
    // Only the archive artifact is sacrificed; the live transcript remains.
    await expect(fs.stat(transcriptPath)).resolves.toBeDefined();
    await expect(fs.stat(archivePath)).rejects.toThrow();
    expect(result).toEqual(expect.objectContaining({ removedFiles: 1, removedEntries: 0 }));
  });
});

View File

@@ -0,0 +1,375 @@
import fs from "node:fs";
import path from "node:path";
import { isPrimarySessionTranscriptFileName, isSessionArchiveArtifactName } from "./artifacts.js";
import { resolveSessionFilePath } from "./paths.js";
import type { SessionEntry } from "./types.js";
/** Disk-budget thresholds; enforcement runs only when both are non-null. */
export type SessionDiskBudgetConfig = {
  /** Hard ceiling for the sessions directory, in bytes. */
  maxDiskBytes: number | null;
  /** Target total the sweep cleans down to once the ceiling is exceeded. */
  highWaterBytes: number | null;
};
/** Summary of one disk-budget sweep. */
export type SessionDiskBudgetSweepResult = {
  totalBytesBefore: number;
  totalBytesAfter: number;
  /** Files removed (or simulated removed in dry-run mode). */
  removedFiles: number;
  /** Store entries evicted from the in-memory store. */
  removedEntries: number;
  freedBytes: number;
  maxBytes: number;
  highWaterBytes: number;
  /** True when the total exceeded maxBytes before any cleanup ran. */
  overBudget: boolean;
};
/** Minimal logging surface consumed by the sweep. */
export type SessionDiskBudgetLogger = {
  warn: (message: string, context?: Record<string, unknown>) => void;
  info: (message: string, context?: Record<string, unknown>) => void;
};
// Default logger: silently discards all messages.
const NOOP_LOGGER: SessionDiskBudgetLogger = {
  warn: () => {},
  info: () => {},
};
/** Per-file metadata gathered from the sessions directory. */
type SessionsDirFileStat = {
  path: string;
  /** Symlink-resolved absolute path used for identity comparisons. */
  canonicalPath: string;
  name: string;
  size: number;
  mtimeMs: number;
};
/**
 * Canonicalize a path for identity comparison: absolutize, then resolve
 * symlinks when the path exists. Nonexistent paths fall back to the
 * absolute form so comparisons still work for files not yet on disk.
 */
function canonicalizePathForComparison(filePath: string): string {
  const absolutePath = path.resolve(filePath);
  let canonical = absolutePath;
  try {
    canonical = fs.realpathSync(absolutePath);
  } catch {
    // Path does not exist (or is unreadable); compare by absolute path.
  }
  return canonical;
}
/** Byte size of the store as it would be serialized (2-space pretty JSON). */
function measureStoreBytes(store: Record<string, SessionEntry>): number {
  const serialized = JSON.stringify(store, null, 2);
  return Buffer.byteLength(serialized, "utf-8");
}
/**
 * Byte size that one top-level entry contributes to the pretty-printed store
 * JSON, excluding the surrounding "{\n" / "\n}" frame and the joining ",\n".
 */
function measureStoreEntryChunkBytes(key: string, entry: SessionEntry): number {
  const serializedEntry = JSON.stringify({ [key]: entry }, null, 2);
  const hasExpectedFrame =
    serializedEntry.startsWith("{\n") && serializedEntry.endsWith("\n}");
  if (hasExpectedFrame) {
    return Buffer.byteLength(serializedEntry.slice(2, -2), "utf-8");
  }
  // Defensive fallback: subtract the four frame bytes from the full size.
  return measureStoreBytes({ [key]: entry }) - 4;
}
/** Map each store key to the serialized byte size its entry contributes. */
function buildStoreEntryChunkSizeMap(store: Record<string, SessionEntry>): Map<string, number> {
  const pairs = Object.entries(store).map(
    ([key, entry]): [string, number] => [key, measureStoreEntryChunkBytes(key, entry)],
  );
  return new Map(pairs);
}
/** Entry's updatedAt when it is a finite number; 0 for missing/invalid. */
function getEntryUpdatedAt(entry?: SessionEntry): number {
  const updatedAt = entry?.updatedAt;
  return typeof updatedAt === "number" && Number.isFinite(updatedAt) ? updatedAt : 0;
}
/**
 * Count how many store entries reference each session ID. Entries without a
 * session ID are ignored. Used to know when the last reference goes away.
 */
function buildSessionIdRefCounts(store: Record<string, SessionEntry>): Map<string, number> {
  const refCounts = new Map<string, number>();
  for (const entry of Object.values(store)) {
    const sessionId = entry?.sessionId;
    if (sessionId) {
      refCounts.set(sessionId, (refCounts.get(sessionId) ?? 0) + 1);
    }
  }
  return refCounts;
}
/**
 * Resolve the transcript path for a store entry, constrained to
 * `sessionsDir`. Returns null when the entry has no session ID, resolution
 * throws, or the resolved path escapes the sessions directory.
 * NOTE(review): resolveSessionFilePath presumably maps the entry's
 * sessionFile or a `<sessionId>.jsonl` default — confirm in paths.ts.
 */
function resolveSessionTranscriptPathForEntry(params: {
  sessionsDir: string;
  entry: SessionEntry;
}): string | null {
  if (!params.entry.sessionId) {
    return null;
  }
  try {
    const resolved = resolveSessionFilePath(params.entry.sessionId, params.entry, {
      sessionsDir: params.sessionsDir,
    });
    // Canonicalize both sides (symlinks resolved) so containment compares
    // real locations, not lexical paths.
    const resolvedSessionsDir = canonicalizePathForComparison(params.sessionsDir);
    const resolvedPath = canonicalizePathForComparison(resolved);
    // Containment check: empty relative means the dir itself; a ".." prefix
    // or absolute relative path means the transcript lives outside sessionsDir.
    const relative = path.relative(resolvedSessionsDir, resolvedPath);
    if (!relative || relative.startsWith("..") || path.isAbsolute(relative)) {
      return null;
    }
    return resolvedPath;
  } catch {
    return null;
  }
}
/**
 * Canonical paths of every transcript still referenced by a store entry and
 * contained in the sessions directory. These must never be budget-deleted.
 */
function resolveReferencedSessionTranscriptPaths(params: {
  sessionsDir: string;
  store: Record<string, SessionEntry>;
}): Set<string> {
  const referencedPaths = Object.values(params.store)
    .map((entry) =>
      resolveSessionTranscriptPathForEntry({ sessionsDir: params.sessionsDir, entry }),
    )
    .filter((resolved): resolved is string => Boolean(resolved))
    .map((resolved) => canonicalizePathForComparison(resolved));
  return new Set(referencedPaths);
}
/**
 * List the regular files directly inside `sessionsDir` with size and mtime
 * metadata. A missing/unreadable directory yields an empty list; files that
 * fail to stat (e.g. deleted mid-scan) are silently skipped.
 *
 * Improvement over the original: the per-file `stat` calls were awaited one
 * at a time in a loop; they are independent, so they now run in parallel via
 * Promise.all while preserving the directory-listing order of the results.
 */
async function readSessionsDirFiles(sessionsDir: string): Promise<SessionsDirFileStat[]> {
  const dirEntries = await fs.promises
    .readdir(sessionsDir, { withFileTypes: true })
    .catch(() => []);
  const statted = await Promise.all(
    dirEntries
      .filter((dirent) => dirent.isFile())
      .map(async (dirent): Promise<SessionsDirFileStat | null> => {
        const filePath = path.join(sessionsDir, dirent.name);
        const stat = await fs.promises.stat(filePath).catch(() => null);
        // Re-check isFile: the dirent may have been replaced since readdir.
        if (!stat?.isFile()) {
          return null;
        }
        return {
          path: filePath,
          canonicalPath: canonicalizePathForComparison(filePath),
          name: dirent.name,
          size: stat.size,
          mtimeMs: stat.mtimeMs,
        };
      }),
  );
  return statted.filter((file): file is SessionsDirFileStat => file !== null);
}
/**
 * Delete a regular file if present and report the bytes it occupied.
 * Returns 0 when the path is missing or not a regular file; removal
 * errors are swallowed (best-effort) and the size is still reported.
 */
async function removeFileIfExists(filePath: string): Promise<number> {
  let sizeBytes = 0;
  try {
    const fileStat = await fs.promises.stat(filePath);
    if (!fileStat.isFile()) {
      return 0;
    }
    sizeBytes = fileStat.size;
  } catch {
    return 0;
  }
  try {
    await fs.promises.rm(filePath, { force: true });
  } catch {
    // Best-effort removal; still report the size we intended to free.
  }
  return sizeBytes;
}
/**
 * Remove one file for the budget sweep and return the bytes freed.
 * In dry-run mode nothing touches the filesystem: sizes come from the
 * pre-scanned map and `simulatedRemovedPaths` guards against counting the
 * same file twice.
 */
async function removeFileForBudget(params: {
  filePath: string;
  canonicalPath?: string;
  dryRun: boolean;
  fileSizesByPath: Map<string, number>;
  simulatedRemovedPaths: Set<string>;
}): Promise<number> {
  const resolvedPath = path.resolve(params.filePath);
  const canonicalPath = params.canonicalPath ?? canonicalizePathForComparison(resolvedPath);
  if (!params.dryRun) {
    return removeFileIfExists(resolvedPath);
  }
  if (params.simulatedRemovedPaths.has(canonicalPath)) {
    return 0;
  }
  const simulatedSize = params.fileSizesByPath.get(canonicalPath) ?? 0;
  if (simulatedSize <= 0) {
    return 0;
  }
  params.simulatedRemovedPaths.add(canonicalPath);
  return simulatedSize;
}
/**
 * Enforce the configured disk budget over the directory holding `storePath`.
 * Returns null when no budget is configured. When the measured total exceeds
 * `maxDiskBytes`, cleanup removes, until the total reaches `highWaterBytes`:
 *   1. archived artifacts and unreferenced primary transcripts, oldest mtime
 *      first; then
 *   2. store entries, oldest `updatedAt` first (skipping the active session),
 *      deleting each entry's transcript once its last reference is gone.
 * `warnOnly` logs instead of deleting; `dryRun` simulates removals using the
 * pre-scanned file sizes without touching the filesystem.
 */
export async function enforceSessionDiskBudget(params: {
  store: Record<string, SessionEntry>;
  storePath: string;
  /** Store key that must never be evicted (compared case-insensitively). */
  activeSessionKey?: string;
  maintenance: SessionDiskBudgetConfig;
  warnOnly: boolean;
  dryRun?: boolean;
  log?: SessionDiskBudgetLogger;
}): Promise<SessionDiskBudgetSweepResult | null> {
  const maxBytes = params.maintenance.maxDiskBytes;
  const highWaterBytes = params.maintenance.highWaterBytes;
  // Budget enforcement is opt-in: both thresholds must be configured.
  if (maxBytes == null || highWaterBytes == null) {
    return null;
  }
  const log = params.log ?? NOOP_LOGGER;
  const dryRun = params.dryRun === true;
  const sessionsDir = path.dirname(params.storePath);
  const files = await readSessionsDirFiles(sessionsDir);
  const fileSizesByPath = new Map(files.map((file) => [file.canonicalPath, file.size]));
  const simulatedRemovedPaths = new Set<string>();
  const resolvedStorePath = canonicalizePathForComparison(params.storePath);
  const storeFile = files.find((file) => file.canonicalPath === resolvedStorePath);
  // Use the projected (in-memory) store size instead of the on-disk
  // sessions.json size so a stale oversized file does not cause over-eviction.
  let projectedStoreBytes = measureStoreBytes(params.store);
  let total =
    files.reduce((sum, file) => sum + file.size, 0) - (storeFile?.size ?? 0) + projectedStoreBytes;
  const totalBefore = total;
  // Under budget: report and do nothing.
  if (total <= maxBytes) {
    return {
      totalBytesBefore: totalBefore,
      totalBytesAfter: total,
      removedFiles: 0,
      removedEntries: 0,
      freedBytes: 0,
      maxBytes,
      highWaterBytes,
      overBudget: false,
    };
  }
  // Warn-only mode: report the overage without deleting anything.
  if (params.warnOnly) {
    log.warn("session disk budget exceeded (warn-only mode)", {
      sessionsDir,
      totalBytes: total,
      maxBytes,
      highWaterBytes,
    });
    return {
      totalBytesBefore: totalBefore,
      totalBytesAfter: total,
      removedFiles: 0,
      removedEntries: 0,
      freedBytes: 0,
      maxBytes,
      highWaterBytes,
      overBudget: true,
    };
  }
  let removedFiles = 0;
  let removedEntries = 0;
  let freedBytes = 0;
  const referencedPaths = resolveReferencedSessionTranscriptPaths({
    sessionsDir,
    store: params.store,
  });
  // Phase 1: safest deletions first — archive artifacts and primary
  // transcripts no store entry references — ordered oldest mtime first.
  const removableFileQueue = files
    .filter(
      (file) =>
        isSessionArchiveArtifactName(file.name) ||
        (isPrimarySessionTranscriptFileName(file.name) && !referencedPaths.has(file.canonicalPath)),
    )
    .toSorted((a, b) => a.mtimeMs - b.mtimeMs);
  for (const file of removableFileQueue) {
    if (total <= highWaterBytes) {
      break;
    }
    const deletedBytes = await removeFileForBudget({
      filePath: file.path,
      canonicalPath: file.canonicalPath,
      dryRun,
      fileSizesByPath,
      simulatedRemovedPaths,
    });
    // NOTE(review): a successfully deleted 0-byte file returns 0 here and is
    // not counted in removedFiles — confirm that is intended.
    if (deletedBytes <= 0) {
      continue;
    }
    total -= deletedBytes;
    freedBytes += deletedBytes;
    removedFiles += 1;
  }
  // Phase 2: still over target — evict store entries, oldest updatedAt first.
  if (total > highWaterBytes) {
    const activeSessionKey = params.activeSessionKey?.trim().toLowerCase();
    const sessionIdRefCounts = buildSessionIdRefCounts(params.store);
    const entryChunkBytesByKey = buildStoreEntryChunkSizeMap(params.store);
    const keys = Object.keys(params.store).toSorted((a, b) => {
      const aTime = getEntryUpdatedAt(params.store[a]);
      const bTime = getEntryUpdatedAt(params.store[b]);
      return aTime - bTime;
    });
    for (const key of keys) {
      if (total <= highWaterBytes) {
        break;
      }
      // Never evict the caller's active session.
      if (activeSessionKey && key.trim().toLowerCase() === activeSessionKey) {
        continue;
      }
      const entry = params.store[key];
      if (!entry) {
        continue;
      }
      const previousProjectedBytes = projectedStoreBytes;
      delete params.store[key];
      const chunkBytes = entryChunkBytesByKey.get(key);
      entryChunkBytesByKey.delete(key);
      if (typeof chunkBytes === "number" && Number.isFinite(chunkBytes) && chunkBytes >= 0) {
        // Removing any one pretty-printed top-level entry always removes the entry chunk plus ",\n" (2 bytes).
        projectedStoreBytes = Math.max(2, projectedStoreBytes - (chunkBytes + 2));
      } else {
        // Fallback: re-measure the whole store when the cached chunk size is unusable.
        projectedStoreBytes = measureStoreBytes(params.store);
      }
      total += projectedStoreBytes - previousProjectedBytes;
      removedEntries += 1;
      const sessionId = entry.sessionId;
      if (!sessionId) {
        continue;
      }
      // Only delete the transcript once the last entry referencing it is gone.
      const nextRefCount = (sessionIdRefCounts.get(sessionId) ?? 1) - 1;
      if (nextRefCount > 0) {
        sessionIdRefCounts.set(sessionId, nextRefCount);
        continue;
      }
      sessionIdRefCounts.delete(sessionId);
      // Resolves to null for transcripts outside sessionsDir — those are
      // never deleted by the budget sweep.
      const transcriptPath = resolveSessionTranscriptPathForEntry({ sessionsDir, entry });
      if (!transcriptPath) {
        continue;
      }
      const deletedBytes = await removeFileForBudget({
        filePath: transcriptPath,
        dryRun,
        fileSizesByPath,
        simulatedRemovedPaths,
      });
      if (deletedBytes <= 0) {
        continue;
      }
      total -= deletedBytes;
      freedBytes += deletedBytes;
      removedFiles += 1;
    }
  }
  if (!dryRun) {
    if (total > highWaterBytes) {
      log.warn("session disk budget still above high-water target after cleanup", {
        sessionsDir,
        totalBytes: total,
        maxBytes,
        highWaterBytes,
        removedFiles,
        removedEntries,
      });
    } else if (removedFiles > 0 || removedEntries > 0) {
      log.info("applied session disk budget cleanup", {
        sessionsDir,
        totalBytesBefore: totalBefore,
        totalBytesAfter: total,
        maxBytes,
        highWaterBytes,
        removedFiles,
        removedEntries,
      });
    }
  }
  // NOTE(review): overBudget is always true on this path — it means "the
  // sweep was triggered", even if cleanup brought total back under maxBytes.
  return {
    totalBytesBefore: totalBefore,
    totalBytesAfter: total,
    removedFiles,
    removedEntries,
    freedBytes,
    maxBytes,
    highWaterBytes,
    overBudget: true,
  };
}

View File

@@ -159,6 +159,40 @@ describe("Integration: saveSessionStore with pruning", () => {
await expect(fs.stat(bakArchived)).resolves.toBeDefined();
});
// Reset archives older than resetArchiveRetention (3d) are removed on save;
// archives inside the window are kept.
it("cleans up reset archives using resetArchiveRetention", async () => {
  mockLoadConfig.mockReturnValue({
    session: {
      maintenance: {
        mode: "enforce",
        pruneAfter: "30d",
        resetArchiveRetention: "3d",
        maxEntries: 500,
        rotateBytes: 10_485_760,
      },
    },
  });
  const now = Date.now();
  const store: Record<string, SessionEntry> = {
    fresh: { sessionId: "fresh-session", updatedAt: now },
  };
  // 10 days old — past the 3d retention window.
  const oldReset = path.join(
    testDir,
    `old-reset.jsonl.reset.${archiveTimestamp(now - 10 * DAY_MS)}`,
  );
  // 1 day old — within the retention window.
  const freshReset = path.join(
    testDir,
    `fresh-reset.jsonl.reset.${archiveTimestamp(now - 1 * DAY_MS)}`,
  );
  await fs.writeFile(oldReset, "old", "utf-8");
  await fs.writeFile(freshReset, "fresh", "utf-8");
  await saveSessionStore(storePath, store);
  await expect(fs.stat(oldReset)).rejects.toThrow();
  await expect(fs.stat(freshReset)).resolves.toBeDefined();
});
it("saveSessionStore skips enforcement when maintenance mode is warn", async () => {
mockLoadConfig.mockReturnValue({
session: {
@@ -180,4 +214,181 @@ describe("Integration: saveSessionStore with pruning", () => {
expect(loaded.fresh).toBeDefined();
expect(Object.keys(loaded)).toHaveLength(2);
});
// Entries evicted by maxEntries capping get their transcripts archived as
// ".deleted.<timestamp>" files rather than silently orphaned.
it("archives transcript files for entries evicted by maxEntries capping", async () => {
  mockLoadConfig.mockReturnValue({
    session: {
      maintenance: {
        mode: "enforce",
        pruneAfter: "365d",
        maxEntries: 1,
        rotateBytes: 10_485_760,
      },
    },
  });
  const now = Date.now();
  const oldestSessionId = "oldest-session";
  const newestSessionId = "newest-session";
  const store: Record<string, SessionEntry> = {
    oldest: { sessionId: oldestSessionId, updatedAt: now - DAY_MS },
    newest: { sessionId: newestSessionId, updatedAt: now },
  };
  const oldestTranscript = path.join(testDir, `${oldestSessionId}.jsonl`);
  const newestTranscript = path.join(testDir, `${newestSessionId}.jsonl`);
  await fs.writeFile(oldestTranscript, '{"type":"session"}\n', "utf-8");
  await fs.writeFile(newestTranscript, '{"type":"session"}\n', "utf-8");
  await saveSessionStore(storePath, store);
  const loaded = loadSessionStore(storePath);
  expect(loaded.oldest).toBeUndefined();
  expect(loaded.newest).toBeDefined();
  await expect(fs.stat(oldestTranscript)).rejects.toThrow();
  await expect(fs.stat(newestTranscript)).resolves.toBeDefined();
  const files = await fs.readdir(testDir);
  // The evicted transcript must reappear as a ".deleted.<timestamp>" archive.
  expect(files.some((name) => name.startsWith(`${oldestSessionId}.jsonl.deleted.`))).toBe(true);
});
// Capping must never archive/delete a sessionFile that lives outside the
// store directory.
it("does not archive external transcript paths when capping entries", async () => {
  mockLoadConfig.mockReturnValue({
    session: {
      maintenance: {
        mode: "enforce",
        pruneAfter: "365d",
        maxEntries: 1,
        rotateBytes: 10_485_760,
      },
    },
  });
  const now = Date.now();
  const externalDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-external-cap-"));
  const externalTranscript = path.join(externalDir, "outside.jsonl");
  await fs.writeFile(externalTranscript, "external", "utf-8");
  const store: Record<string, SessionEntry> = {
    oldest: {
      sessionId: "outside",
      sessionFile: externalTranscript,
      updatedAt: now - DAY_MS,
    },
    newest: { sessionId: "inside", updatedAt: now },
  };
  await fs.writeFile(path.join(testDir, "inside.jsonl"), '{"type":"session"}\n', "utf-8");
  try {
    await saveSessionStore(storePath, store);
    const loaded = loadSessionStore(storePath);
    expect(loaded.oldest).toBeUndefined();
    expect(loaded.newest).toBeDefined();
    // Entry is evicted, but the external file itself survives.
    await expect(fs.stat(externalTranscript)).resolves.toBeDefined();
  } finally {
    await fs.rm(externalDir, { recursive: true, force: true });
  }
});
// Disk-budget enforcement evicts the oldest entry (and its transcript) first.
it("enforces maxDiskBytes with oldest-first session eviction", async () => {
  mockLoadConfig.mockReturnValue({
    session: {
      maintenance: {
        mode: "enforce",
        pruneAfter: "365d",
        maxEntries: 100,
        rotateBytes: 10_485_760,
        maxDiskBytes: 900,
        highWaterBytes: 700,
      },
    },
  });
  const now = Date.now();
  const oldSessionId = "old-disk-session";
  const newSessionId = "new-disk-session";
  const store: Record<string, SessionEntry> = {
    old: { sessionId: oldSessionId, updatedAt: now - DAY_MS },
    recent: { sessionId: newSessionId, updatedAt: now },
  };
  await fs.writeFile(path.join(testDir, `${oldSessionId}.jsonl`), "x".repeat(500), "utf-8");
  await fs.writeFile(path.join(testDir, `${newSessionId}.jsonl`), "y".repeat(500), "utf-8");
  await saveSessionStore(storePath, store);
  const loaded = loadSessionStore(storePath);
  expect(Object.keys(loaded).length).toBe(1);
  expect(loaded.recent).toBeDefined();
  await expect(fs.stat(path.join(testDir, `${oldSessionId}.jsonl`))).rejects.toThrow();
  await expect(fs.stat(path.join(testDir, `${newSessionId}.jsonl`))).resolves.toBeDefined();
});
// Budget math uses the projected in-memory store size, so a stale oversized
// sessions.json on disk must not trigger eviction.
it("uses projected sessions.json size to avoid over-eviction", async () => {
  mockLoadConfig.mockReturnValue({
    session: {
      maintenance: {
        mode: "enforce",
        pruneAfter: "365d",
        maxEntries: 100,
        rotateBytes: 10_485_760,
        maxDiskBytes: 900,
        highWaterBytes: 700,
      },
    },
  });
  // Simulate a stale oversized on-disk sessions.json from a previous write.
  await fs.writeFile(storePath, JSON.stringify({ noisy: "x".repeat(10_000) }), "utf-8");
  const now = Date.now();
  const store: Record<string, SessionEntry> = {
    older: { sessionId: "older", updatedAt: now - DAY_MS },
    newer: { sessionId: "newer", updatedAt: now },
  };
  await fs.writeFile(path.join(testDir, "older.jsonl"), "x".repeat(80), "utf-8");
  await fs.writeFile(path.join(testDir, "newer.jsonl"), "y".repeat(80), "utf-8");
  await saveSessionStore(storePath, store);
  const loaded = loadSessionStore(storePath);
  expect(loaded.older).toBeDefined();
  expect(loaded.newer).toBeDefined();
});
// Budget cleanup must never reach outside the agent sessions directory, even
// when an external transcript is what pushes the total over budget.
it("never deletes transcripts outside the agent sessions directory during budget cleanup", async () => {
  mockLoadConfig.mockReturnValue({
    session: {
      maintenance: {
        mode: "enforce",
        pruneAfter: "365d",
        maxEntries: 100,
        rotateBytes: 10_485_760,
        maxDiskBytes: 500,
        highWaterBytes: 300,
      },
    },
  });
  const now = Date.now();
  const externalDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-external-session-"));
  const externalTranscript = path.join(externalDir, "outside.jsonl");
  await fs.writeFile(externalTranscript, "z".repeat(400), "utf-8");
  const store: Record<string, SessionEntry> = {
    older: {
      sessionId: "outside",
      sessionFile: externalTranscript,
      updatedAt: now - DAY_MS,
    },
    newer: {
      sessionId: "inside",
      updatedAt: now,
    },
  };
  await fs.writeFile(path.join(testDir, "inside.jsonl"), "i".repeat(400), "utf-8");
  try {
    await saveSessionStore(storePath, store);
    await expect(fs.stat(externalTranscript)).resolves.toBeDefined();
  } finally {
    await fs.rm(externalDir, { recursive: true, force: true });
  }
});
});

View File

@@ -20,6 +20,7 @@ import {
import { getFileMtimeMs, isCacheEnabled, resolveCacheTtlMs } from "../cache-utils.js";
import { loadConfig } from "../config.js";
import type { SessionMaintenanceConfig, SessionMaintenanceMode } from "../types.base.js";
import { enforceSessionDiskBudget, type SessionDiskBudgetSweepResult } from "./disk-budget.js";
import { deriveSessionMetaPatch } from "./metadata.js";
import { mergeSessionEntry, type SessionEntry } from "./types.js";
@@ -299,6 +300,7 @@ const DEFAULT_SESSION_PRUNE_AFTER_MS = 30 * 24 * 60 * 60 * 1000;
const DEFAULT_SESSION_MAX_ENTRIES = 500;
const DEFAULT_SESSION_ROTATE_BYTES = 10_485_760; // 10 MB
const DEFAULT_SESSION_MAINTENANCE_MODE: SessionMaintenanceMode = "warn";
const DEFAULT_SESSION_DISK_BUDGET_HIGH_WATER_RATIO = 0.8;
export type SessionMaintenanceWarning = {
activeSessionKey: string;
@@ -310,11 +312,23 @@ export type SessionMaintenanceWarning = {
wouldCap: boolean;
};
/** Summary passed to onMaintenanceApplied after a maintenance pass on save. */
export type SessionMaintenanceApplyReport = {
  mode: SessionMaintenanceMode;
  /** Entry count before pruning/capping. */
  beforeCount: number;
  /** Entry count after the pass (includes disk-budget evictions). */
  afterCount: number;
  pruned: number;
  capped: number;
  /** Disk-budget sweep stats, or null when no budget is configured. */
  diskBudget: SessionDiskBudgetSweepResult | null;
};
/** Fully-resolved maintenance settings: defaults applied, sizes in bytes. */
type ResolvedSessionMaintenanceConfig = {
  mode: SessionMaintenanceMode;
  pruneAfterMs: number;
  maxEntries: number;
  rotateBytes: number;
  /** null disables reset-archive cleanup (config value `false`). */
  resetArchiveRetentionMs: number | null;
  maxDiskBytes: number | null;
  highWaterBytes: number | null;
};
function resolvePruneAfterMs(maintenance?: SessionMaintenanceConfig): number {
@@ -341,6 +355,70 @@ function resolveRotateBytes(maintenance?: SessionMaintenanceConfig): number {
}
}
/**
 * Resolve reset-archive retention. `false` disables cleanup (null); unset or
 * unparsable values fall back to `pruneAfterMs`; otherwise the configured
 * duration (default unit: days) is used.
 */
function resolveResetArchiveRetentionMs(
  maintenance: SessionMaintenanceConfig | undefined,
  pruneAfterMs: number,
): number | null {
  const raw = maintenance?.resetArchiveRetention;
  if (raw === false) {
    return null;
  }
  if (raw === undefined || raw === null || raw === "") {
    return pruneAfterMs;
  }
  try {
    return parseDurationMs(String(raw).trim(), { defaultUnit: "d" });
  } catch {
    // Invalid duration: fall back to the prune window rather than failing.
    return pruneAfterMs;
  }
}
/**
 * Resolve the disk ceiling in bytes, or null (budget disabled) when unset or
 * unparsable. Default unit is bytes.
 */
function resolveMaxDiskBytes(maintenance?: SessionMaintenanceConfig): number | null {
  const raw = maintenance?.maxDiskBytes;
  if (raw === undefined || raw === null || raw === "") {
    return null;
  }
  try {
    return parseByteSize(String(raw).trim(), { defaultUnit: "b" });
  } catch {
    return null;
  }
}
/**
 * Resolve the cleanup target. Defaults to 80% of maxDiskBytes clamped to
 * [1, maxDiskBytes]; an explicit value is clamped to maxDiskBytes; null when
 * no ceiling is configured.
 */
function resolveHighWaterBytes(
  maintenance: SessionMaintenanceConfig | undefined,
  maxDiskBytes: number | null,
): number | null {
  const computeDefault = () => {
    // NOTE(review): this null branch is unreachable — the outer null check
    // below runs before any call to computeDefault.
    if (maxDiskBytes == null) {
      return null;
    }
    if (maxDiskBytes <= 0) {
      return 0;
    }
    return Math.max(
      1,
      Math.min(
        maxDiskBytes,
        Math.floor(maxDiskBytes * DEFAULT_SESSION_DISK_BUDGET_HIGH_WATER_RATIO),
      ),
    );
  };
  if (maxDiskBytes == null) {
    return null;
  }
  const raw = maintenance?.highWaterBytes;
  if (raw === undefined || raw === null || raw === "") {
    return computeDefault();
  }
  try {
    const parsed = parseByteSize(String(raw).trim(), { defaultUnit: "b" });
    // Explicit target can never exceed the ceiling.
    return Math.min(parsed, maxDiskBytes);
  } catch {
    return computeDefault();
  }
}
/**
* Resolve maintenance settings from openclaw.json (`session.maintenance`).
* Falls back to built-in defaults when config is missing or unset.
@@ -352,11 +430,16 @@ export function resolveMaintenanceConfig(): ResolvedSessionMaintenanceConfig {
} catch {
// Config may not be available (e.g. in tests). Use defaults.
}
const pruneAfterMs = resolvePruneAfterMs(maintenance);
const maxDiskBytes = resolveMaxDiskBytes(maintenance);
return {
mode: maintenance?.mode ?? DEFAULT_SESSION_MAINTENANCE_MODE,
pruneAfterMs: resolvePruneAfterMs(maintenance),
pruneAfterMs,
maxEntries: maintenance?.maxEntries ?? DEFAULT_SESSION_MAX_ENTRIES,
rotateBytes: resolveRotateBytes(maintenance),
resetArchiveRetentionMs: resolveResetArchiveRetentionMs(maintenance, pruneAfterMs),
maxDiskBytes,
highWaterBytes: resolveHighWaterBytes(maintenance, maxDiskBytes),
};
}
@@ -439,7 +522,10 @@ export function getActiveSessionMaintenanceWarning(params: {
export function capEntryCount(
store: Record<string, SessionEntry>,
overrideMax?: number,
opts: { log?: boolean } = {},
opts: {
log?: boolean;
onCapped?: (params: { key: string; entry: SessionEntry }) => void;
} = {},
): number {
const maxEntries = overrideMax ?? resolveMaintenanceConfig().maxEntries;
const keys = Object.keys(store);
@@ -456,6 +542,10 @@ export function capEntryCount(
const toRemove = sorted.slice(maxEntries);
for (const key of toRemove) {
const entry = store[key];
if (entry) {
opts.onCapped?.({ key, entry });
}
delete store[key];
}
if (opts.log !== false) {
@@ -539,6 +629,10 @@ type SaveSessionStoreOptions = {
activeSessionKey?: string;
/** Optional callback for warn-only maintenance. */
onWarn?: (warning: SessionMaintenanceWarning) => void | Promise<void>;
/** Optional callback with maintenance stats after a save. */
onMaintenanceApplied?: (report: SessionMaintenanceApplyReport) => void | Promise<void>;
/** Optional overrides used by maintenance commands. */
maintenanceOverride?: Partial<ResolvedSessionMaintenanceConfig>;
};
async function saveSessionStoreUnlocked(
@@ -553,8 +647,9 @@ async function saveSessionStoreUnlocked(
if (!opts?.skipMaintenance) {
// Resolve maintenance config once (avoids repeated loadConfig() calls).
const maintenance = resolveMaintenanceConfig();
const maintenance = { ...resolveMaintenanceConfig(), ...opts?.maintenanceOverride };
const shouldWarnOnly = maintenance.mode === "warn";
const beforeCount = Object.keys(store).length;
if (shouldWarnOnly) {
const activeSessionKey = opts?.activeSessionKey?.trim();
@@ -576,39 +671,96 @@ async function saveSessionStoreUnlocked(
await opts?.onWarn?.(warning);
}
}
const diskBudget = await enforceSessionDiskBudget({
store,
storePath,
activeSessionKey: opts?.activeSessionKey,
maintenance,
warnOnly: true,
log,
});
await opts?.onMaintenanceApplied?.({
mode: maintenance.mode,
beforeCount,
afterCount: Object.keys(store).length,
pruned: 0,
capped: 0,
diskBudget,
});
} else {
// Prune stale entries and cap total count before serializing.
const prunedSessionFiles = new Map<string, string | undefined>();
pruneStaleEntries(store, maintenance.pruneAfterMs, {
const removedSessionFiles = new Map<string, string | undefined>();
const pruned = pruneStaleEntries(store, maintenance.pruneAfterMs, {
onPruned: ({ entry }) => {
if (!prunedSessionFiles.has(entry.sessionId) || entry.sessionFile) {
prunedSessionFiles.set(entry.sessionId, entry.sessionFile);
if (!removedSessionFiles.has(entry.sessionId) || entry.sessionFile) {
removedSessionFiles.set(entry.sessionId, entry.sessionFile);
}
},
});
const capped = capEntryCount(store, maintenance.maxEntries, {
onCapped: ({ entry }) => {
if (!removedSessionFiles.has(entry.sessionId) || entry.sessionFile) {
removedSessionFiles.set(entry.sessionId, entry.sessionFile);
}
},
});
capEntryCount(store, maintenance.maxEntries);
const archivedDirs = new Set<string>();
for (const [sessionId, sessionFile] of prunedSessionFiles) {
const referencedSessionIds = new Set(
Object.values(store)
.map((entry) => entry?.sessionId)
.filter((id): id is string => Boolean(id)),
);
for (const [sessionId, sessionFile] of removedSessionFiles) {
if (referencedSessionIds.has(sessionId)) {
continue;
}
const archived = archiveSessionTranscripts({
sessionId,
storePath,
sessionFile,
reason: "deleted",
restrictToStoreDir: true,
});
for (const archivedPath of archived) {
archivedDirs.add(path.dirname(archivedPath));
}
}
if (archivedDirs.size > 0) {
if (archivedDirs.size > 0 || maintenance.resetArchiveRetentionMs != null) {
const targetDirs =
archivedDirs.size > 0 ? [...archivedDirs] : [path.dirname(path.resolve(storePath))];
await cleanupArchivedSessionTranscripts({
directories: [...archivedDirs],
directories: targetDirs,
olderThanMs: maintenance.pruneAfterMs,
reason: "deleted",
});
if (maintenance.resetArchiveRetentionMs != null) {
await cleanupArchivedSessionTranscripts({
directories: targetDirs,
olderThanMs: maintenance.resetArchiveRetentionMs,
reason: "reset",
});
}
}
// Rotate the on-disk file if it exceeds the size threshold.
await rotateSessionFile(storePath, maintenance.rotateBytes);
const diskBudget = await enforceSessionDiskBudget({
store,
storePath,
activeSessionKey: opts?.activeSessionKey,
maintenance,
warnOnly: false,
log,
});
await opts?.onMaintenanceApplied?.({
mode: maintenance.mode,
beforeCount,
afterCount: Object.keys(store).length,
pruned,
capped,
diskBudget,
});
}
}