fix: /status shows incorrect context percentage — totalTokens clamped to contextTokens (#15114) (#15133)

Merged via /review-pr -> /prepare-pr -> /merge-pr.

Prepared head SHA: a489669fc7
Co-authored-by: echoVic <16428813+echoVic@users.noreply.github.com>
Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com>
Reviewed-by: @gumadeiras
Authored by 青雲 on 2026-02-13 12:52:19 +08:00; committed by GitHub. Parent commit: b93ad2cd48; this commit: fd076eb43a.
28 changed files with 361 additions and 53 deletions

View File

@@ -151,7 +151,7 @@ describe("runReplyAgent messaging tool suppression", () => {
expect(result).toMatchObject({ text: "hello world!" });
});
it("persists usage even when replies are suppressed", async () => {
it("persists usage fields even when replies are suppressed", async () => {
const storePath = path.join(
await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-session-store-")),
"sessions.json",
@@ -177,7 +177,42 @@ describe("runReplyAgent messaging tool suppression", () => {
expect(result).toBeUndefined();
const store = loadSessionStore(storePath, { skipCache: true });
expect(store[sessionKey]?.totalTokens ?? 0).toBeGreaterThan(0);
expect(store[sessionKey]?.inputTokens).toBe(10);
expect(store[sessionKey]?.outputTokens).toBe(5);
expect(store[sessionKey]?.totalTokens).toBeUndefined();
expect(store[sessionKey]?.totalTokensFresh).toBe(false);
expect(store[sessionKey]?.model).toBe("claude-opus-4-5");
});
it("persists totalTokens from promptTokens when snapshot is available", async () => {
const storePath = path.join(
await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-session-store-")),
"sessions.json",
);
const sessionKey = "main";
const entry: SessionEntry = { sessionId: "session", updatedAt: Date.now() };
await saveSessionStore(storePath, { [sessionKey]: entry });
runEmbeddedPiAgentMock.mockResolvedValueOnce({
payloads: [{ text: "hello world!" }],
messagingToolSentTexts: ["different message"],
messagingToolSentTargets: [{ tool: "slack", provider: "slack", to: "channel:C1" }],
meta: {
agentMeta: {
usage: { input: 10, output: 5 },
promptTokens: 42_000,
model: "claude-opus-4-5",
provider: "anthropic",
},
},
});
const result = await createRun("slack", { storePath, sessionKey });
expect(result).toBeUndefined();
const store = loadSessionStore(storePath, { skipCache: true });
expect(store[sessionKey]?.totalTokens).toBe(42_000);
expect(store[sessionKey]?.totalTokensFresh).toBe(true);
expect(store[sessionKey]?.model).toBe("claude-opus-4-5");
});
});

View File

@@ -6,7 +6,11 @@ import {
isEmbeddedPiRunActive,
waitForEmbeddedPiRunEnd,
} from "../../agents/pi-embedded.js";
import { resolveSessionFilePath, resolveSessionFilePathOptions } from "../../config/sessions.js";
import {
resolveFreshSessionTotalTokens,
resolveSessionFilePath,
resolveSessionFilePathOptions,
} from "../../config/sessions.js";
import { logVerbose } from "../../globals.js";
import { enqueueSystemEvent } from "../../infra/system-events.js";
import { formatContextUsageShort, formatTokenCount } from "../status.js";
@@ -124,12 +128,9 @@ export const handleCompactCommand: CommandHandler = async (params) => {
}
// Use the post-compaction token count for context summary if available
const tokensAfterCompaction = result.result?.tokensAfter;
const totalTokens =
tokensAfterCompaction ??
params.sessionEntry.totalTokens ??
(params.sessionEntry.inputTokens ?? 0) + (params.sessionEntry.outputTokens ?? 0);
const totalTokens = tokensAfterCompaction ?? resolveFreshSessionTotalTokens(params.sessionEntry);
const contextSummary = formatContextUsageShort(
totalTokens > 0 ? totalTokens : null,
typeof totalTokens === "number" && totalTokens > 0 ? totalTokens : null,
params.contextTokens ?? params.sessionEntry.contextTokens ?? null,
);
const reason = result.reason?.trim();

View File

@@ -113,6 +113,17 @@ describe("shouldRunMemoryFlush", () => {
}),
).toBe(true);
});
it("ignores stale cached totals", () => {
expect(
shouldRunMemoryFlush({
entry: { totalTokens: 96_000, totalTokensFresh: false, compactionCount: 1 },
contextWindowTokens: 100_000,
reserveTokensFloor: 5_000,
softThresholdTokens: 2_000,
}),
).toBe(false);
});
});
describe("resolveMemoryFlushContextWindowTokens", () => {

View File

@@ -1,8 +1,8 @@
import type { OpenClawConfig } from "../../config/config.js";
import type { SessionEntry } from "../../config/sessions.js";
import { lookupContextTokens } from "../../agents/context.js";
import { DEFAULT_CONTEXT_TOKENS } from "../../agents/defaults.js";
import { DEFAULT_PI_COMPACTION_RESERVE_TOKENS_FLOOR } from "../../agents/pi-settings.js";
import { resolveFreshSessionTotalTokens, type SessionEntry } from "../../config/sessions.js";
import { SILENT_REPLY_TOKEN } from "../tokens.js";
export const DEFAULT_MEMORY_FLUSH_SOFT_TOKENS = 4000;
@@ -76,12 +76,15 @@ export function resolveMemoryFlushContextWindowTokens(params: {
}
export function shouldRunMemoryFlush(params: {
entry?: Pick<SessionEntry, "totalTokens" | "compactionCount" | "memoryFlushCompactionCount">;
entry?: Pick<
SessionEntry,
"totalTokens" | "totalTokensFresh" | "compactionCount" | "memoryFlushCompactionCount"
>;
contextWindowTokens: number;
reserveTokensFloor: number;
softThresholdTokens: number;
}): boolean {
const totalTokens = params.entry?.totalTokens;
const totalTokens = resolveFreshSessionTotalTokens(params.entry);
if (!totalTokens || totalTokens <= 0) {
return false;
}

View File

@@ -18,6 +18,7 @@ export async function persistRunSessionUsage(params: PersistRunSessionUsageParam
sessionKey: params.sessionKey,
usage: params.usage,
lastCallUsage: params.lastCallUsage,
promptTokens: params.promptTokens,
modelUsed: params.modelUsed,
providerUsed: params.providerUsed,
contextTokensUsed: params.contextTokensUsed,

View File

@@ -255,6 +255,7 @@ export async function incrementCompactionCount(params: {
// If tokensAfter is provided, update the cached token counts to reflect post-compaction state
if (tokensAfter != null && tokensAfter > 0) {
updates.totalTokens = tokensAfter;
updates.totalTokensFresh = true;
// Clear input/output breakdown since we only have the total estimate after compaction
updates.inputTokens = undefined;
updates.outputTokens = undefined;

View File

@@ -44,12 +44,13 @@ describe("persistSessionUsageUpdate", () => {
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
// totalTokens should reflect lastCallUsage (12_000 input), not accumulated (180_000)
expect(stored[sessionKey].totalTokens).toBe(12_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
// inputTokens/outputTokens still reflect accumulated usage for cost tracking
expect(stored[sessionKey].inputTokens).toBe(180_000);
expect(stored[sessionKey].outputTokens).toBe(10_000);
});
it("falls back to accumulated usage for totalTokens when lastCallUsage not provided", async () => {
it("marks totalTokens as unknown when no fresh context snapshot is available", async () => {
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-usage-"));
const storePath = path.join(tmp, "sessions.json");
const sessionKey = "main";
@@ -67,10 +68,34 @@ describe("persistSessionUsageUpdate", () => {
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
expect(stored[sessionKey].totalTokens).toBe(50_000);
expect(stored[sessionKey].totalTokens).toBeUndefined();
expect(stored[sessionKey].totalTokensFresh).toBe(false);
});
it("caps totalTokens at context window even with lastCallUsage", async () => {
it("uses promptTokens when available without lastCallUsage", async () => {
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-usage-"));
const storePath = path.join(tmp, "sessions.json");
const sessionKey = "main";
await seedSessionStore({
storePath,
sessionKey,
entry: { sessionId: "s1", updatedAt: Date.now() },
});
await persistSessionUsageUpdate({
storePath,
sessionKey,
usage: { input: 50_000, output: 5_000, total: 55_000 },
promptTokens: 42_000,
contextTokensUsed: 200_000,
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
expect(stored[sessionKey].totalTokens).toBe(42_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
});
it("keeps non-clamped lastCallUsage totalTokens when exceeding context window", async () => {
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-usage-"));
const storePath = path.join(tmp, "sessions.json");
const sessionKey = "main";
@@ -89,7 +114,7 @@ describe("persistSessionUsageUpdate", () => {
});
const stored = JSON.parse(await fs.readFile(storePath, "utf-8"));
// Capped at context window
expect(stored[sessionKey].totalTokens).toBe(200_000);
expect(stored[sessionKey].totalTokens).toBe(250_000);
expect(stored[sessionKey].totalTokensFresh).toBe(true);
});
});

View File

@@ -45,20 +45,29 @@ export async function persistSessionUsageUpdate(params: {
const input = params.usage?.input ?? 0;
const output = params.usage?.output ?? 0;
const resolvedContextTokens = params.contextTokensUsed ?? entry.contextTokens;
const hasPromptTokens =
typeof params.promptTokens === "number" &&
Number.isFinite(params.promptTokens) &&
params.promptTokens > 0;
const hasFreshContextSnapshot = Boolean(params.lastCallUsage) || hasPromptTokens;
// Use last-call usage for totalTokens when available. The accumulated
// `usage.input` sums input tokens from every API call in the run
// (tool-use loops, compaction retries), overstating actual context.
// `lastCallUsage` reflects only the final API call — the true context.
const usageForContext = params.lastCallUsage ?? params.usage;
const patch: Partial<SessionEntry> = {
inputTokens: input,
outputTokens: output,
totalTokens:
deriveSessionTotalTokens({
const totalTokens = hasFreshContextSnapshot
? deriveSessionTotalTokens({
usage: usageForContext,
contextTokens: resolvedContextTokens,
promptTokens: params.promptTokens,
}) ?? input,
})
: undefined;
const patch: Partial<SessionEntry> = {
inputTokens: input,
outputTokens: output,
// Missing a last-call snapshot means context utilization is stale/unknown.
totalTokens,
totalTokensFresh: typeof totalTokens === "number",
modelProvider: params.providerUsed ?? entry.modelProvider,
model: params.modelUsed ?? entry.model,
contextTokens: resolvedContextTokens,

View File

@@ -358,6 +358,7 @@ export async function initSessionState(params: {
// Clear stale token metrics from previous session so /status doesn't
// display the old session's context usage after /new or /reset.
sessionEntry.totalTokens = undefined;
sessionEntry.totalTokensFresh = false;
sessionEntry.inputTokens = undefined;
sessionEntry.outputTokens = undefined;
sessionEntry.contextTokens = undefined;