From 3855f7788b5123294bfca939cc2d8cbd384cfb58 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Fri, 31 Oct 2025 02:49:34 +0000
Subject: [PATCH] chore: sync model price data (d08dbc2cc3a9a734027f5c0c2aca28b427a8af9cf2ad317d29f1ac5a907aa02e)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 model_prices_and_context_window.json   | 30 +++++++++++++++-----------
 model_prices_and_context_window.sha256 |  2 +-
 2 files changed, 19 insertions(+), 13 deletions(-)

diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 7103ffd4..624dea84 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -7073,7 +7073,8 @@
         "output_cost_per_token": 6e-07,
         "litellm_provider": "deepinfra",
         "mode": "chat",
-        "supports_tool_choice": true
+        "supports_tool_choice": true,
+        "supports_vision": true
     },
     "deepinfra/Qwen/Qwen3-14B": {
         "max_tokens": 40960,
@@ -18620,7 +18621,8 @@
         "max_tokens": 8192,
         "mode": "chat",
         "output_cost_per_token": 6.3e-07,
-        "supports_tool_choice": true
+        "supports_tool_choice": true,
+        "supports_vision": true
     },
     "openrouter/qwen/qwen3-coder": {
         "input_cost_per_token": 1e-06,
@@ -21143,16 +21145,6 @@
         "mode": "chat",
         "output_cost_per_token": 2.4e-07
     },
-    "vercel_ai_gateway/glm-4.6": {
-        "litellm_provider": "vercel_ai_gateway",
-        "cache_read_input_token_cost": 1.1e-07,
-        "input_cost_per_token": 6e-07,
-        "max_input_tokens": 200000,
-        "max_output_tokens": 200000,
-        "max_tokens": 200000,
-        "mode": "chat",
-        "output_cost_per_token": 2.2e-06
-    },
     "vercel_ai_gateway/alibaba/qwen-3-235b": {
         "input_cost_per_token": 2e-07,
         "litellm_provider": "vercel_ai_gateway",
@@ -21986,6 +21978,20 @@
         "mode": "chat",
         "output_cost_per_token": 1.1e-06
     },
+    "vercel_ai_gateway/zai/glm-4.6": {
+        "litellm_provider": "vercel_ai_gateway",
+        "cache_read_input_token_cost": 1.1e-07,
+        "input_cost_per_token": 4.5e-07,
+        "max_input_tokens": 200000,
+        "max_output_tokens": 200000,
+        "max_tokens": 200000,
+        "mode": "chat",
+        "output_cost_per_token": 1.8e-06,
+        "source": "https://vercel.com/ai-gateway/models/glm-4.6",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_tool_choice": true
+    },
     "vertex_ai/claude-3-5-haiku": {
         "input_cost_per_token": 1e-06,
         "litellm_provider": "vertex_ai-anthropic_models",
diff --git a/model_prices_and_context_window.sha256 b/model_prices_and_context_window.sha256
index 28072570..319ed23e 100644
--- a/model_prices_and_context_window.sha256
+++ b/model_prices_and_context_window.sha256
@@ -1 +1 @@
-0993471f2ec29cc215559ce0bdd67552391153a7e9d1bcc55e59b93d57c340a5
+d08dbc2cc3a9a734027f5c0c2aca28b427a8af9cf2ad317d29f1ac5a907aa02e