diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 634ea6dc..d4afde20 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -5164,6 +5164,19 @@
         "supports_function_calling": true,
         "supports_tool_choice": true
     },
+    "azure_ai/mistral-large-3": {
+        "input_cost_per_token": 5e-07,
+        "litellm_provider": "azure_ai",
+        "max_input_tokens": 256000,
+        "max_output_tokens": 8191,
+        "max_tokens": 8191,
+        "mode": "chat",
+        "output_cost_per_token": 1.5e-06,
+        "source": "https://azure.microsoft.com/en-us/blog/introducing-mistral-large-3-in-microsoft-foundry-open-capable-and-ready-for-production-workloads/",
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
     "azure_ai/mistral-medium-2505": {
         "input_cost_per_token": 4e-07,
         "litellm_provider": "azure_ai",
@@ -18745,6 +18758,21 @@
         "supports_response_schema": true,
         "supports_tool_choice": true
     },
+    "mistral/mistral-large-3": {
+        "input_cost_per_token": 5e-07,
+        "litellm_provider": "mistral",
+        "max_input_tokens": 256000,
+        "max_output_tokens": 8191,
+        "max_tokens": 8191,
+        "mode": "chat",
+        "output_cost_per_token": 1.5e-06,
+        "source": "https://docs.mistral.ai/models/mistral-large-3-25-12",
+        "supports_assistant_prefill": true,
+        "supports_function_calling": true,
+        "supports_response_schema": true,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
     "mistral/mistral-medium": {
         "input_cost_per_token": 2.7e-06,
         "litellm_provider": "mistral",
diff --git a/model_prices_and_context_window.sha256 b/model_prices_and_context_window.sha256
index fff9fe20..d39fc442 100644
--- a/model_prices_and_context_window.sha256
+++ b/model_prices_and_context_window.sha256
@@ -1 +1 @@
-d2330f06c452e4fa3a3868d19bcdbb23bdde477e9d8c970d52af74c69fe0f81e
+63527da56043cc9655f823c338817a42e604b11542a6123638f6a220a68fbe11
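Both new entries carry identical pricing ($0.50 per 1M input tokens, $1.50 per 1M output tokens). Below is a minimal sketch of how the per-token fields translate into a request cost, assuming the JSON above is read directly from the repo root; the file path and token counts are illustrative only.

```python
import json

# Load the pricing map modified in this diff (path assumes the repo root as CWD).
with open("model_prices_and_context_window.json") as f:
    model_cost = json.load(f)

entry = model_cost["mistral/mistral-large-3"]

# Illustrative token counts for a single request.
prompt_tokens = 1_000
completion_tokens = 500

# Cost is a straight per-token multiplication of the two pricing fields.
cost = (
    prompt_tokens * entry["input_cost_per_token"]
    + completion_tokens * entry["output_cost_per_token"]
)
print(f"estimated cost: ${cost:.6f}")  # 1000*5e-07 + 500*1.5e-06 = $0.001250
```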