fix(failover): narrow service-unavailable to require overload indicator (#32828) (#36646)

Merged via squash.

Prepared head SHA: 46fb430612
Co-authored-by: jnMetaCode <12096460+jnMetaCode@users.noreply.github.com>
Co-authored-by: altaywtf <9790196+altaywtf@users.noreply.github.com>
Reviewed-by: @altaywtf
This commit is contained in:
jiangnan
2026-03-06 05:01:57 +08:00
committed by GitHub
parent f014e255df
commit 029c473727
4 changed files with 24 additions and 2 deletions

View File

@@ -139,6 +139,7 @@ Docs: https://docs.openclaw.ai
- Mattermost/plugin SDK import policy: replace remaining monolithic `openclaw/plugin-sdk` imports in Mattermost mention-gating paths/tests with scoped subpaths (`openclaw/plugin-sdk/compat` and `openclaw/plugin-sdk/mattermost`) so `pnpm check` passes `lint:plugins:no-monolithic-plugin-sdk-entry-imports` on baseline. (#36480) Thanks @Takhoffman.
- Agents/failover cooldown classification: stop treating generic `cooling down` text as provider `rate_limit` so healthy models no longer show false global cooldown/rate-limit warnings while explicit `model_cooldown` markers still trigger failover. (#32972) Thanks @stakeswky.
- Agents/failover service-unavailable handling: stop treating bare proxy/CDN `service unavailable` errors as provider overload while keeping them retryable via the timeout/failover path, so transient outages no longer show false rate-limit warnings or block fallback. (#36646) Thanks @jnMetaCode.
## 2026.3.2

View File

@@ -540,13 +540,20 @@ describe("classifyFailoverReason", () => {
"This model is currently experiencing high demand. Please try again later.", "This model is currently experiencing high demand. Please try again later.",
), ),
).toBe("rate_limit"); ).toBe("rate_limit");
expect(classifyFailoverReason("LLM error: service unavailable")).toBe("rate_limit"); // "service unavailable" combined with overload/capacity indicator → rate_limit
// (exercises the new regex — none of the standalone patterns match here)
expect(classifyFailoverReason("service unavailable due to capacity limits")).toBe("rate_limit");
expect(
classifyFailoverReason(
'{"error":{"code":503,"message":"The model is overloaded. Please try later","status":"UNAVAILABLE"}}',
),
).toBe("rate_limit");
});
it("classifies bare 'service unavailable' as timeout instead of rate_limit (#32828)", () => {
// A generic "service unavailable" from a proxy/CDN should stay retryable,
// but it should not be treated as provider overload / rate limit.
expect(classifyFailoverReason("LLM error: service unavailable")).toBe("timeout");
});
it("classifies permanent auth errors as auth_permanent", () => { it("classifies permanent auth errors as auth_permanent", () => {
expect(classifyFailoverReason("invalid_api_key")).toBe("auth_permanent"); expect(classifyFailoverReason("invalid_api_key")).toBe("auth_permanent");
expect(classifyFailoverReason("Your api key has been revoked")).toBe("auth_permanent"); expect(classifyFailoverReason("Your api key has been revoked")).toBe("auth_permanent");

View File

@@ -15,12 +15,16 @@ const ERROR_PATTERNS = {
overloaded: [
/overloaded_error|"type"\s*:\s*"overloaded_error"/i,
"overloaded",
"service unavailable", // Match "service unavailable" only when combined with an explicit overload
// indicator — a generic 503 from a proxy/CDN should not be classified as
// provider-overload (#32828).
/service[_ ]unavailable.*(?:overload|capacity|high[_ ]demand)|(?:overload|capacity|high[_ ]demand).*service[_ ]unavailable/i,
"high demand", "high demand",
], ],
timeout: [ timeout: [
"timeout", "timeout",
"timed out", "timed out",
"service unavailable",
"deadline exceeded", "deadline exceeded",
"context deadline exceeded", "context deadline exceeded",
"connection error", "connection error",

View File

@@ -658,6 +658,16 @@ describe("runEmbeddedPiAgent auth profile rotation", () => {
expect(usageStats["openai:p1"]?.cooldownUntil).toBeUndefined(); expect(usageStats["openai:p1"]?.cooldownUntil).toBeUndefined();
}); });
it("rotates on bare service unavailable without cooling down the profile", async () => {
const { usageStats } = await runAutoPinnedRotationCase({
errorMessage: "LLM error: service unavailable",
sessionKey: "agent:test:service-unavailable-no-cooldown",
runId: "run:service-unavailable-no-cooldown",
});
expect(typeof usageStats["openai:p2"]?.lastUsed).toBe("number");
expect(usageStats["openai:p1"]?.cooldownUntil).toBeUndefined();
});
it("does not rotate for compaction timeouts", async () => { it("does not rotate for compaction timeouts", async () => {
await withAgentWorkspace(async ({ agentDir, workspaceDir }) => { await withAgentWorkspace(async ({ agentDir, workspaceDir }) => {
await writeAuthStore(agentDir); await writeAuthStore(agentDir);