diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 21c9f53d..3e1c4df8 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -22283,6 +22283,50 @@
         "supports_vision": true,
         "tool_use_system_prompt_tokens": 159
     },
+    "vertex_ai/mistralai/codestral-2@001": {
+        "input_cost_per_token": 3e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 9e-07,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "vertex_ai/codestral-2": {
+        "input_cost_per_token": 3e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 9e-07,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "vertex_ai/codestral-2@001": {
+        "input_cost_per_token": 3e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 9e-07,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "vertex_ai/mistralai/codestral-2": {
+        "input_cost_per_token": 3e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 9e-07,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
     "vertex_ai/codestral-2501": {
         "input_cost_per_token": 2e-07,
         "litellm_provider": "vertex_ai-mistral_models",
@@ -22612,6 +22656,50 @@
         "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
         "supports_tool_choice": true
     },
+    "vertex_ai/mistral-medium-3": {
+        "input_cost_per_token": 4e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 8191,
+        "max_tokens": 8191,
+        "mode": "chat",
+        "output_cost_per_token": 2e-06,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "vertex_ai/mistral-medium-3@001": {
+        "input_cost_per_token": 4e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 8191,
+        "max_tokens": 8191,
+        "mode": "chat",
+        "output_cost_per_token": 2e-06,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "vertex_ai/mistralai/mistral-medium-3": {
+        "input_cost_per_token": 4e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 8191,
+        "max_tokens": 8191,
+        "mode": "chat",
+        "output_cost_per_token": 2e-06,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
+    "vertex_ai/mistralai/mistral-medium-3@001": {
+        "input_cost_per_token": 4e-07,
+        "litellm_provider": "vertex_ai-mistral_models",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 8191,
+        "max_tokens": 8191,
+        "mode": "chat",
+        "output_cost_per_token": 2e-06,
+        "supports_function_calling": true,
+        "supports_tool_choice": true
+    },
     "vertex_ai/mistral-large-2411": {
         "input_cost_per_token": 2e-06,
         "litellm_provider": "vertex_ai-mistral_models",
diff --git a/model_prices_and_context_window.sha256 b/model_prices_and_context_window.sha256
index 2d24a5e6..c7f68602 100644
--- a/model_prices_and_context_window.sha256
+++ b/model_prices_and_context_window.sha256
@@ -1 +1 @@
-6f3bcef0ce1eb943bcdf4ee07ed231a95698c3408649813cecd5235d49e74a7c
+3af64dbbef5021f2441e1ea575efb02a3fe0dafe995ccab73b84d85b78167413
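
Not part of the patch: a minimal sketch of how the input_cost_per_token / output_cost_per_token values added above translate into a per-request cost when the pricing JSON is read directly. The file path, chosen model key, and token counts are illustrative assumptions, not anything defined by this change.

# sketch only: compute an estimated request cost from the pricing map
import json

with open("model_prices_and_context_window.json") as f:
    prices = json.load(f)

entry = prices["vertex_ai/mistral-medium-3"]   # one of the entries added above
prompt_tokens, completion_tokens = 1200, 300   # assumed example usage

cost = (prompt_tokens * entry["input_cost_per_token"]
        + completion_tokens * entry["output_cost_per_token"])
print(f"estimated cost: ${cost:.6f}")          # 1200*4e-07 + 300*2e-06 = $0.00108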