From a459981da0ec856e9765a45bfce47b23ad451d61 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Thu, 13 Nov 2025 19:04:43 +0000
Subject: [PATCH] chore: sync model price data (fee5daaf2561997bf62563daf845b7fa4f9740c6ee7b235af71411619ee9801b)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 model_prices_and_context_window.json   | 173 +++++++++++++++++++++++++
 model_prices_and_context_window.sha256 |   2 +-
 2 files changed, 174 insertions(+), 1 deletion(-)

diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 4eac8335..fa36e2d6 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -13852,6 +13852,113 @@
         "supports_service_tier": true,
         "supports_vision": true
     },
+    "gpt-5.1": {
+        "cache_read_input_token_cost": 1.25e-07,
+        "cache_read_input_token_cost_priority": 2.5e-07,
+        "input_cost_per_token": 1.25e-06,
+        "input_cost_per_token_priority": 2.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 272000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 1e-05,
+        "output_cost_per_token_priority": 2e-05,
+        "supported_endpoints": [
+            "/v1/chat/completions",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text",
+            "image"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_service_tier": true,
+        "supports_vision": true
+    },
+    "gpt-5.1-2025-11-13": {
+        "cache_read_input_token_cost": 1.25e-07,
+        "cache_read_input_token_cost_priority": 2.5e-07,
+        "input_cost_per_token": 1.25e-06,
+        "input_cost_per_token_priority": 2.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 272000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 1e-05,
+        "output_cost_per_token_priority": 2e-05,
+        "supported_endpoints": [
+            "/v1/chat/completions",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text",
+            "image"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_service_tier": true,
+        "supports_vision": true
+    },
+    "gpt-5.1-chat-latest": {
+        "cache_read_input_token_cost": 1.25e-07,
+        "cache_read_input_token_cost_priority": 2.5e-07,
+        "input_cost_per_token": 1.25e-06,
+        "input_cost_per_token_priority": 2.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 16384,
+        "max_tokens": 16384,
+        "mode": "chat",
+        "output_cost_per_token": 1e-05,
+        "output_cost_per_token_priority": 2e-05,
+        "supported_endpoints": [
+            "/v1/chat/completions",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text",
+            "image"
+        ],
+        "supports_function_calling": false,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": false,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": false,
+        "supports_vision": true
+    },
     "gpt-5-pro": {
         "input_cost_per_token": 1.5e-05,
         "input_cost_per_token_batches": 7.5e-06,
@@ -14051,6 +14158,72 @@
         "supports_tool_choice": true,
         "supports_vision": true
     },
+    "gpt-5.1-codex": {
+        "cache_read_input_token_cost": 1.25e-07,
+        "cache_read_input_token_cost_priority": 2.5e-07,
+        "input_cost_per_token": 1.25e-06,
+        "input_cost_per_token_priority": 2.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 272000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "responses",
+        "output_cost_per_token": 1e-05,
+        "output_cost_per_token_priority": 2e-05,
+        "supported_endpoints": [
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": false,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
+    "gpt-5.1-codex-mini": {
+        "cache_read_input_token_cost": 2.5e-08,
+        "cache_read_input_token_cost_priority": 4.5e-08,
+        "input_cost_per_token": 2.5e-07,
+        "input_cost_per_token_priority": 4.5e-07,
+        "litellm_provider": "openai",
+        "max_input_tokens": 272000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "responses",
+        "output_cost_per_token": 2e-06,
+        "output_cost_per_token_priority": 3.6e-06,
+        "supported_endpoints": [
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": false,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
     "gpt-5-mini": {
         "cache_read_input_token_cost": 2.5e-08,
         "cache_read_input_token_cost_flex": 1.25e-08,
diff --git a/model_prices_and_context_window.sha256 b/model_prices_and_context_window.sha256
index 72c7f22d..b2a224f6 100644
--- a/model_prices_and_context_window.sha256
+++ b/model_prices_and_context_window.sha256
@@ -1 +1 @@
-df1bee44f092559290c9d782f7a3739512c87bc42040c96fcfc749c25778ae60
+fee5daaf2561997bf62563daf845b7fa4f9740c6ee7b235af71411619ee9801b