fix(agents): normalize SiliconFlow Pro thinking=off payload (#25435)

Land PR #25435 from @Zjianru.
Changelog: add 2026.2.24 fix entry with contributor credit.

Co-authored-by: codez <codezhujr@gmail.com>
This commit is contained in:
Peter Steinberger
2026-02-25 01:11:20 +00:00
parent 8470dff619
commit bd213cf2ad
3 changed files with 106 additions and 0 deletions

View File

@@ -310,6 +310,68 @@ describe("applyExtraParamsToAgent", () => {
expect(payloads[0]).toEqual({ reasoning: { max_tokens: 256 } });
});
it("normalizes thinking=off to null for SiliconFlow Pro models", () => {
  // Record every payload emitted by the stubbed stream function so we can
  // observe what the SiliconFlow compatibility wrapper did to it.
  const captured: Record<string, unknown>[] = [];
  const stubStream: StreamFn = (_model, _context, options) => {
    const body: Record<string, unknown> = { thinking: "off" };
    options?.onPayload?.(body);
    captured.push(body);
    return {} as ReturnType<StreamFn>;
  };

  const agent = { streamFn: stubStream };
  applyExtraParamsToAgent(
    agent,
    undefined,
    "siliconflow",
    "Pro/MiniMaxAI/MiniMax-M2.1",
    undefined,
    "off",
  );

  const proModel = {
    api: "openai-completions",
    provider: "siliconflow",
    id: "Pro/MiniMaxAI/MiniMax-M2.1",
  } as Model<"openai-completions">;
  const ctx: Context = { messages: [] };
  void agent.streamFn?.(proModel, ctx, {});

  // The wrapper must rewrite the "off" string to null before forwarding.
  expect(captured).toHaveLength(1);
  expect(captured[0]?.thinking).toBeNull();
});
it("keeps thinking=off unchanged for non-Pro SiliconFlow model IDs", () => {
  // Record payloads emitted by the stubbed stream function; for a non-Pro
  // model ID the compatibility wrapper must not be installed at all.
  const captured: Record<string, unknown>[] = [];
  const stubStream: StreamFn = (_model, _context, options) => {
    const body: Record<string, unknown> = { thinking: "off" };
    options?.onPayload?.(body);
    captured.push(body);
    return {} as ReturnType<StreamFn>;
  };

  const agent = { streamFn: stubStream };
  applyExtraParamsToAgent(
    agent,
    undefined,
    "siliconflow",
    "deepseek-ai/DeepSeek-V3.2",
    undefined,
    "off",
  );

  const nonProModel = {
    api: "openai-completions",
    provider: "siliconflow",
    id: "deepseek-ai/DeepSeek-V3.2",
  } as Model<"openai-completions">;
  const ctx: Context = { messages: [] };
  void agent.streamFn?.(nonProModel, ctx, {});

  // Payload passes through untouched: thinking stays the literal "off".
  expect(captured).toHaveLength(1);
  expect(captured[0]?.thinking).toBe("off");
});
it("adds OpenRouter attribution headers to stream options", () => {
const { calls, agent } = createOptionsCaptureAgent();

View File

@@ -408,6 +408,42 @@ function mapThinkingLevelToOpenRouterReasoningEffort(
return thinkingLevel;
}
/**
 * Decide whether the SiliconFlow thinking=off compatibility shim applies.
 *
 * All three conditions must hold: the provider is SiliconFlow, thinking is
 * explicitly disabled ("off"), and the model ID is a `Pro/`-prefixed one.
 */
function shouldApplySiliconFlowThinkingOffCompat(params: {
  provider: string;
  modelId: string;
  thinkingLevel?: ThinkLevel;
}): boolean {
  if (params.provider !== "siliconflow") {
    return false;
  }
  if (params.thinkingLevel !== "off") {
    return false;
  }
  return params.modelId.startsWith("Pro/");
}
/**
 * SiliconFlow's Pro/* models reject string thinking modes (including "off")
 * with HTTP 400 invalid-parameter errors. This wrapper intercepts each
 * outgoing payload and rewrites `thinking: "off"` to `thinking: null`,
 * keeping the "thinking disabled" intent without an invalid enum value.
 *
 * Falls back to `streamSimple` when no base stream function is supplied.
 */
function createSiliconFlowThinkingWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
  const inner = baseStreamFn ?? streamSimple;
  return (model, context, options) => {
    const forward = options?.onPayload;
    const normalizeThinking = (payload) => {
      if (payload !== null && typeof payload === "object") {
        const record = payload as Record<string, unknown>;
        if (record.thinking === "off") {
          record.thinking = null;
        }
      }
      // Always hand the (possibly mutated) payload to the caller's hook.
      forward?.(payload);
    };
    return inner(model, context, { ...options, onPayload: normalizeThinking });
  };
}
/**
* Create a streamFn wrapper that adds OpenRouter app attribution headers
* and injects reasoning.effort based on the configured thinking level.
@@ -544,6 +580,13 @@ export function applyExtraParamsToAgent(
agent.streamFn = createAnthropicBetaHeadersWrapper(agent.streamFn, anthropicBetas);
}
if (shouldApplySiliconFlowThinkingOffCompat({ provider, modelId, thinkingLevel })) {
log.debug(
`normalizing thinking=off to thinking=null for SiliconFlow compatibility (${provider}/${modelId})`,
);
agent.streamFn = createSiliconFlowThinkingWrapper(agent.streamFn);
}
if (provider === "openrouter") {
log.debug(`applying OpenRouter app attribution headers for ${provider}/${modelId}`);
// "auto" is a dynamic routing model — we don't know which underlying model