mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-07 21:21:24 +00:00
Web UI: add full cron edit parity, all-jobs run history, and compact filters (openclaw#24155) thanks @Takhoffman
Verified: - pnpm install --frozen-lockfile - pnpm build - pnpm check - pnpm test:macmini Co-authored-by: Takhoffman <781889+Takhoffman@users.noreply.github.com> Co-authored-by: Tak Hoffman <781889+Takhoffman@users.noreply.github.com>
This commit is contained in:
@@ -19,6 +19,35 @@ export type CronRunLogEntry = {
|
||||
nextRunAtMs?: number;
|
||||
} & CronRunTelemetry;
|
||||
|
||||
// Sort direction for run-log queries.
export type CronRunLogSortDir = "asc" | "desc";
// Run-status filter; "all" disables status filtering.
export type CronRunLogStatusFilter = "all" | "ok" | "error" | "skipped";

/** Options accepted by readCronRunLogEntriesPage(). */
export type ReadCronRunLogPageOptions = {
  limit?: number;
  offset?: number;
  jobId?: string;
  // Single-status filter; ignored when `statuses` is a non-empty array.
  status?: CronRunLogStatusFilter;
  statuses?: CronRunStatus[];
  // Single delivery-status filter; ignored when `deliveryStatuses` is non-empty.
  deliveryStatus?: CronDeliveryStatus;
  deliveryStatuses?: CronDeliveryStatus[];
  // Case-insensitive free-text match over summary/error/jobId.
  query?: string;
  sortDir?: CronRunLogSortDir;
};

/** One page of run-log entries plus pagination metadata. */
export type CronRunLogPageResult = {
  entries: CronRunLogEntry[];
  total: number;
  offset: number;
  limit: number;
  hasMore: boolean;
  // Offset of the next page, or null when this is the last page.
  nextOffset: number | null;
};

/** Options for the all-jobs variant; `jobId` is replaced by the store path. */
type ReadCronRunLogAllPageOptions = Omit<ReadCronRunLogPageOptions, "jobId"> & {
  storePath: string;
  // Map of job id -> display name, used for query matching and page decoration.
  jobNameById?: Record<string, string>;
};
|
||||
|
||||
function assertSafeCronRunLogJobId(jobId: string): string {
|
||||
const trimmed = jobId.trim();
|
||||
if (!trimmed) {
|
||||
@@ -98,14 +127,78 @@ export async function readCronRunLogEntries(
|
||||
opts?: { limit?: number; jobId?: string },
|
||||
): Promise<CronRunLogEntry[]> {
|
||||
const limit = Math.max(1, Math.min(5000, Math.floor(opts?.limit ?? 200)));
|
||||
const page = await readCronRunLogEntriesPage(filePath, {
|
||||
jobId: opts?.jobId,
|
||||
limit,
|
||||
offset: 0,
|
||||
status: "all",
|
||||
sortDir: "desc",
|
||||
});
|
||||
return page.entries.toReversed();
|
||||
}
|
||||
|
||||
function normalizeRunStatusFilter(status?: string): CronRunLogStatusFilter {
|
||||
if (status === "ok" || status === "error" || status === "skipped" || status === "all") {
|
||||
return status;
|
||||
}
|
||||
return "all";
|
||||
}
|
||||
|
||||
function normalizeRunStatuses(opts?: {
|
||||
statuses?: CronRunStatus[];
|
||||
status?: CronRunLogStatusFilter;
|
||||
}): CronRunStatus[] | null {
|
||||
if (Array.isArray(opts?.statuses) && opts.statuses.length > 0) {
|
||||
const filtered = opts.statuses.filter(
|
||||
(status): status is CronRunStatus =>
|
||||
status === "ok" || status === "error" || status === "skipped",
|
||||
);
|
||||
if (filtered.length > 0) {
|
||||
return Array.from(new Set(filtered));
|
||||
}
|
||||
}
|
||||
const status = normalizeRunStatusFilter(opts?.status);
|
||||
if (status === "all") {
|
||||
return null;
|
||||
}
|
||||
return [status];
|
||||
}
|
||||
|
||||
function normalizeDeliveryStatuses(opts?: {
|
||||
deliveryStatuses?: CronDeliveryStatus[];
|
||||
deliveryStatus?: CronDeliveryStatus;
|
||||
}): CronDeliveryStatus[] | null {
|
||||
if (Array.isArray(opts?.deliveryStatuses) && opts.deliveryStatuses.length > 0) {
|
||||
const filtered = opts.deliveryStatuses.filter(
|
||||
(status): status is CronDeliveryStatus =>
|
||||
status === "delivered" ||
|
||||
status === "not-delivered" ||
|
||||
status === "unknown" ||
|
||||
status === "not-requested",
|
||||
);
|
||||
if (filtered.length > 0) {
|
||||
return Array.from(new Set(filtered));
|
||||
}
|
||||
}
|
||||
if (
|
||||
opts?.deliveryStatus === "delivered" ||
|
||||
opts?.deliveryStatus === "not-delivered" ||
|
||||
opts?.deliveryStatus === "unknown" ||
|
||||
opts?.deliveryStatus === "not-requested"
|
||||
) {
|
||||
return [opts.deliveryStatus];
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function parseAllRunLogEntries(raw: string, opts?: { jobId?: string }): CronRunLogEntry[] {
|
||||
const jobId = opts?.jobId?.trim() || undefined;
|
||||
const raw = await fs.readFile(path.resolve(filePath), "utf-8").catch(() => "");
|
||||
if (!raw.trim()) {
|
||||
return [];
|
||||
}
|
||||
const parsed: CronRunLogEntry[] = [];
|
||||
const lines = raw.split("\n");
|
||||
for (let i = lines.length - 1; i >= 0 && parsed.length < limit; i--) {
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i]?.trim();
|
||||
if (!line) {
|
||||
continue;
|
||||
@@ -182,5 +275,125 @@ export async function readCronRunLogEntries(
|
||||
// ignore invalid lines
|
||||
}
|
||||
}
|
||||
return parsed.toReversed();
|
||||
return parsed;
|
||||
}
|
||||
|
||||
export async function readCronRunLogEntriesPage(
|
||||
filePath: string,
|
||||
opts?: ReadCronRunLogPageOptions,
|
||||
): Promise<CronRunLogPageResult> {
|
||||
const limit = Math.max(1, Math.min(200, Math.floor(opts?.limit ?? 50)));
|
||||
const raw = await fs.readFile(path.resolve(filePath), "utf-8").catch(() => "");
|
||||
const statuses = normalizeRunStatuses(opts);
|
||||
const deliveryStatuses = normalizeDeliveryStatuses(opts);
|
||||
const query = opts?.query?.trim().toLowerCase() ?? "";
|
||||
const sortDir: CronRunLogSortDir = opts?.sortDir === "asc" ? "asc" : "desc";
|
||||
const all = parseAllRunLogEntries(raw, { jobId: opts?.jobId });
|
||||
const filtered = all.filter((entry) => {
|
||||
if (statuses && (!entry.status || !statuses.includes(entry.status))) {
|
||||
return false;
|
||||
}
|
||||
if (deliveryStatuses) {
|
||||
const deliveryStatus = entry.deliveryStatus ?? "not-requested";
|
||||
if (!deliveryStatuses.includes(deliveryStatus)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!query) {
|
||||
return true;
|
||||
}
|
||||
const haystack = [entry.summary ?? "", entry.error ?? "", entry.jobId].join(" ").toLowerCase();
|
||||
return haystack.includes(query);
|
||||
});
|
||||
const sorted =
|
||||
sortDir === "asc"
|
||||
? filtered.toSorted((a, b) => a.ts - b.ts)
|
||||
: filtered.toSorted((a, b) => b.ts - a.ts);
|
||||
const total = sorted.length;
|
||||
const offset = Math.max(0, Math.min(total, Math.floor(opts?.offset ?? 0)));
|
||||
const entries = sorted.slice(offset, offset + limit);
|
||||
const nextOffset = offset + entries.length;
|
||||
return {
|
||||
entries,
|
||||
total,
|
||||
offset,
|
||||
limit,
|
||||
hasMore: nextOffset < total,
|
||||
nextOffset: nextOffset < total ? nextOffset : null,
|
||||
};
|
||||
}
|
||||
|
||||
export async function readCronRunLogEntriesPageAll(
|
||||
opts: ReadCronRunLogAllPageOptions,
|
||||
): Promise<CronRunLogPageResult> {
|
||||
const limit = Math.max(1, Math.min(200, Math.floor(opts.limit ?? 50)));
|
||||
const statuses = normalizeRunStatuses(opts);
|
||||
const deliveryStatuses = normalizeDeliveryStatuses(opts);
|
||||
const query = opts.query?.trim().toLowerCase() ?? "";
|
||||
const sortDir: CronRunLogSortDir = opts.sortDir === "asc" ? "asc" : "desc";
|
||||
const runsDir = path.resolve(path.dirname(path.resolve(opts.storePath)), "runs");
|
||||
const files = await fs.readdir(runsDir, { withFileTypes: true }).catch(() => []);
|
||||
const jsonlFiles = files
|
||||
.filter((entry) => entry.isFile() && entry.name.endsWith(".jsonl"))
|
||||
.map((entry) => path.join(runsDir, entry.name));
|
||||
if (jsonlFiles.length === 0) {
|
||||
return {
|
||||
entries: [],
|
||||
total: 0,
|
||||
offset: 0,
|
||||
limit,
|
||||
hasMore: false,
|
||||
nextOffset: null,
|
||||
};
|
||||
}
|
||||
const chunks = await Promise.all(
|
||||
jsonlFiles.map(async (filePath) => {
|
||||
const raw = await fs.readFile(filePath, "utf-8").catch(() => "");
|
||||
return parseAllRunLogEntries(raw);
|
||||
}),
|
||||
);
|
||||
const all = chunks.flat();
|
||||
const filtered = all.filter((entry) => {
|
||||
if (statuses && (!entry.status || !statuses.includes(entry.status))) {
|
||||
return false;
|
||||
}
|
||||
if (deliveryStatuses) {
|
||||
const deliveryStatus = entry.deliveryStatus ?? "not-requested";
|
||||
if (!deliveryStatuses.includes(deliveryStatus)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!query) {
|
||||
return true;
|
||||
}
|
||||
const jobName = opts.jobNameById?.[entry.jobId] ?? "";
|
||||
const haystack = [entry.summary ?? "", entry.error ?? "", entry.jobId, jobName]
|
||||
.join(" ")
|
||||
.toLowerCase();
|
||||
return haystack.includes(query);
|
||||
});
|
||||
const sorted =
|
||||
sortDir === "asc"
|
||||
? filtered.toSorted((a, b) => a.ts - b.ts)
|
||||
: filtered.toSorted((a, b) => b.ts - a.ts);
|
||||
const total = sorted.length;
|
||||
const offset = Math.max(0, Math.min(total, Math.floor(opts.offset ?? 0)));
|
||||
const entries = sorted.slice(offset, offset + limit);
|
||||
if (opts.jobNameById) {
|
||||
for (const entry of entries) {
|
||||
const jobName = opts.jobNameById[entry.jobId];
|
||||
if (jobName) {
|
||||
(entry as CronRunLogEntry & { jobName?: string }).jobName = jobName;
|
||||
}
|
||||
}
|
||||
}
|
||||
const nextOffset = offset + entries.length;
|
||||
return {
|
||||
entries,
|
||||
total,
|
||||
offset,
|
||||
limit,
|
||||
hasMore: nextOffset < total,
|
||||
nextOffset: nextOffset < total ? nextOffset : null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -26,6 +26,10 @@ export class CronService {
|
||||
return await ops.list(this.state, opts);
|
||||
}
|
||||
|
||||
// Paged job listing with filtering/sorting; see ops.listPage for option semantics.
async listPage(opts?: ops.CronListPageOptions) {
  return await ops.listPage(this.state, opts);
}
|
||||
|
||||
// Create a new cron job from the given definition; delegates to ops.add.
async add(input: CronJobCreate) {
  return await ops.add(this.state, input);
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { CronJobCreate, CronJobPatch } from "../types.js";
|
||||
import type { CronJob, CronJobCreate, CronJobPatch } from "../types.js";
|
||||
import {
|
||||
applyJobPatch,
|
||||
computeJobNextRunAtMs,
|
||||
@@ -22,6 +22,29 @@ import {
|
||||
wake,
|
||||
} from "./timer.js";
|
||||
|
||||
// Filter over a job's enabled flag.
type CronJobsEnabledFilter = "all" | "enabled" | "disabled";
// Job fields that paged listings can sort by.
type CronJobsSortBy = "nextRunAtMs" | "updatedAtMs" | "name";
// Sort direction.
type CronSortDir = "asc" | "desc";

/** Options accepted by listPage(). */
export type CronListPageOptions = {
  // Legacy flag; superseded by `enabled` when that field is set.
  includeDisabled?: boolean;
  limit?: number;
  offset?: number;
  // Case-insensitive free-text match over name/description/agentId.
  query?: string;
  enabled?: CronJobsEnabledFilter;
  sortBy?: CronJobsSortBy;
  sortDir?: CronSortDir;
};

/** One page of jobs plus pagination metadata. */
export type CronListPageResult = {
  jobs: ReturnType<typeof sortJobs>;
  total: number;
  offset: number;
  limit: number;
  hasMore: boolean;
  // Offset of the next page, or null when this is the last page.
  nextOffset: number | null;
};
|
||||
|
||||
async function ensureLoadedForRead(state: CronServiceState) {
|
||||
await ensureLoaded(state, { skipRecompute: true });
|
||||
if (!state.store) {
|
||||
@@ -101,6 +124,80 @@ export async function list(state: CronServiceState, opts?: { includeDisabled?: b
|
||||
});
|
||||
}
|
||||
|
||||
function resolveEnabledFilter(opts?: CronListPageOptions): CronJobsEnabledFilter {
|
||||
if (opts?.enabled === "all" || opts?.enabled === "enabled" || opts?.enabled === "disabled") {
|
||||
return opts.enabled;
|
||||
}
|
||||
return opts?.includeDisabled ? "all" : "enabled";
|
||||
}
|
||||
|
||||
function sortJobs(jobs: CronJob[], sortBy: CronJobsSortBy, sortDir: CronSortDir) {
|
||||
const dir = sortDir === "desc" ? -1 : 1;
|
||||
return jobs.toSorted((a, b) => {
|
||||
let cmp = 0;
|
||||
if (sortBy === "name") {
|
||||
cmp = a.name.localeCompare(b.name, undefined, { sensitivity: "base" });
|
||||
} else if (sortBy === "updatedAtMs") {
|
||||
cmp = a.updatedAtMs - b.updatedAtMs;
|
||||
} else {
|
||||
const aNext = a.state.nextRunAtMs;
|
||||
const bNext = b.state.nextRunAtMs;
|
||||
if (typeof aNext === "number" && typeof bNext === "number") {
|
||||
cmp = aNext - bNext;
|
||||
} else if (typeof aNext === "number") {
|
||||
cmp = -1;
|
||||
} else if (typeof bNext === "number") {
|
||||
cmp = 1;
|
||||
} else {
|
||||
cmp = 0;
|
||||
}
|
||||
}
|
||||
if (cmp !== 0) {
|
||||
return cmp * dir;
|
||||
}
|
||||
return a.id.localeCompare(b.id);
|
||||
});
|
||||
}
|
||||
|
||||
export async function listPage(state: CronServiceState, opts?: CronListPageOptions) {
|
||||
return await locked(state, async () => {
|
||||
await ensureLoadedForRead(state);
|
||||
const query = opts?.query?.trim().toLowerCase() ?? "";
|
||||
const enabledFilter = resolveEnabledFilter(opts);
|
||||
const sortBy = opts?.sortBy ?? "nextRunAtMs";
|
||||
const sortDir = opts?.sortDir ?? "asc";
|
||||
const source = state.store?.jobs ?? [];
|
||||
const filtered = source.filter((job) => {
|
||||
if (enabledFilter === "enabled" && !job.enabled) {
|
||||
return false;
|
||||
}
|
||||
if (enabledFilter === "disabled" && job.enabled) {
|
||||
return false;
|
||||
}
|
||||
if (!query) {
|
||||
return true;
|
||||
}
|
||||
const haystack = [job.name, job.description ?? "", job.agentId ?? ""].join(" ").toLowerCase();
|
||||
return haystack.includes(query);
|
||||
});
|
||||
const sorted = sortJobs(filtered, sortBy, sortDir);
|
||||
const total = sorted.length;
|
||||
const offset = Math.max(0, Math.min(total, Math.floor(opts?.offset ?? 0)));
|
||||
const defaultLimit = total === 0 ? 50 : total;
|
||||
const limit = Math.max(1, Math.min(200, Math.floor(opts?.limit ?? defaultLimit)));
|
||||
const jobs = sorted.slice(offset, offset + limit);
|
||||
const nextOffset = offset + jobs.length;
|
||||
return {
|
||||
jobs,
|
||||
total,
|
||||
offset,
|
||||
limit,
|
||||
hasMore: nextOffset < total,
|
||||
nextOffset: nextOffset < total ? nextOffset : null,
|
||||
} satisfies CronListPageResult;
|
||||
});
|
||||
}
|
||||
|
||||
export async function add(state: CronServiceState, input: CronJobCreate) {
|
||||
return await locked(state, async () => {
|
||||
warnIfDisabled(state, "add");
|
||||
|
||||
Reference in New Issue
Block a user