Mirror of https://github.com/Wei-Shaw/claude-relay-service.git, synced 2026-01-22 16:43:35 +00:00
chore: sync model price data (a62115bcc51131b5938953206ac6dd7b5a5afdc57249f4cd5822900e5e46d85e)
@@ -16863,14 +16863,14 @@
 "supports_vision": true
 },
 "gpt-4o-audio-preview": {
-"input_cost_per_audio_token": 0.0001,
+"input_cost_per_audio_token": 4e-05,
 "input_cost_per_token": 2.5e-06,
 "litellm_provider": "openai",
 "max_input_tokens": 128000,
 "max_output_tokens": 16384,
 "max_tokens": 16384,
 "mode": "chat",
-"output_cost_per_audio_token": 0.0002,
+"output_cost_per_audio_token": 8e-05,
 "output_cost_per_token": 1e-05,
 "supports_audio_input": true,
 "supports_audio_output": true,
@@ -16880,14 +16880,14 @@
 "supports_tool_choice": true
 },
 "gpt-4o-audio-preview-2024-10-01": {
-"input_cost_per_audio_token": 0.0001,
+"input_cost_per_audio_token": 4e-05,
 "input_cost_per_token": 2.5e-06,
 "litellm_provider": "openai",
 "max_input_tokens": 128000,
 "max_output_tokens": 16384,
 "max_tokens": 16384,
 "mode": "chat",
-"output_cost_per_audio_token": 0.0002,
+"output_cost_per_audio_token": 8e-05,
 "output_cost_per_token": 1e-05,
 "supports_audio_input": true,
 "supports_audio_output": true,
@@ -16930,6 +16930,186 @@
 "supports_system_messages": true,
 "supports_tool_choice": true
 },
"gpt-audio": {
|
||||
"input_cost_per_audio_token": 3.2e-05,
|
||||
"input_cost_per_token": 2.5e-06,
|
||||
"litellm_provider": "openai",
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"max_tokens": 16384,
|
||||
"mode": "chat",
|
||||
"output_cost_per_audio_token": 6.4e-05,
|
||||
"output_cost_per_token": 1e-05,
|
||||
"supported_endpoints": [
|
||||
"/v1/chat/completions",
|
||||
"/v1/responses",
|
||||
"/v1/realtime",
|
||||
"/v1/batch"
|
||||
],
|
||||
"supported_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supported_output_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supports_audio_input": true,
|
||||
"supports_audio_output": true,
|
||||
"supports_function_calling": true,
|
||||
"supports_native_streaming": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_prompt_caching": false,
|
||||
"supports_reasoning": false,
|
||||
"supports_response_schema": false,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true,
|
||||
"supports_vision": false
|
||||
},
|
||||
"gpt-audio-2025-08-28": {
|
||||
"input_cost_per_audio_token": 3.2e-05,
|
||||
"input_cost_per_token": 2.5e-06,
|
||||
"litellm_provider": "openai",
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"max_tokens": 16384,
|
||||
"mode": "chat",
|
||||
"output_cost_per_audio_token": 6.4e-05,
|
||||
"output_cost_per_token": 1e-05,
|
||||
"supported_endpoints": [
|
||||
"/v1/chat/completions",
|
||||
"/v1/responses",
|
||||
"/v1/realtime",
|
||||
"/v1/batch"
|
||||
],
|
||||
"supported_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supported_output_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supports_audio_input": true,
|
||||
"supports_audio_output": true,
|
||||
"supports_function_calling": true,
|
||||
"supports_native_streaming": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_prompt_caching": false,
|
||||
"supports_reasoning": false,
|
||||
"supports_response_schema": false,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true,
|
||||
"supports_vision": false
|
||||
},
|
||||
"gpt-audio-mini": {
|
||||
"input_cost_per_audio_token": 1e-05,
|
||||
"input_cost_per_token": 6e-07,
|
||||
"litellm_provider": "openai",
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"max_tokens": 16384,
|
||||
"mode": "chat",
|
||||
"output_cost_per_audio_token": 2e-05,
|
||||
"output_cost_per_token": 2.4e-06,
|
||||
"supported_endpoints": [
|
||||
"/v1/chat/completions",
|
||||
"/v1/responses",
|
||||
"/v1/realtime",
|
||||
"/v1/batch"
|
||||
],
|
||||
"supported_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supported_output_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supports_audio_input": true,
|
||||
"supports_audio_output": true,
|
||||
"supports_function_calling": true,
|
||||
"supports_native_streaming": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_prompt_caching": false,
|
||||
"supports_reasoning": false,
|
||||
"supports_response_schema": false,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true,
|
||||
"supports_vision": false
|
||||
},
|
||||
"gpt-audio-mini-2025-10-06": {
|
||||
"input_cost_per_audio_token": 1e-05,
|
||||
"input_cost_per_token": 6e-07,
|
||||
"litellm_provider": "openai",
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"max_tokens": 16384,
|
||||
"mode": "chat",
|
||||
"output_cost_per_audio_token": 2e-05,
|
||||
"output_cost_per_token": 2.4e-06,
|
||||
"supported_endpoints": [
|
||||
"/v1/chat/completions",
|
||||
"/v1/responses",
|
||||
"/v1/realtime",
|
||||
"/v1/batch"
|
||||
],
|
||||
"supported_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supported_output_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supports_audio_input": true,
|
||||
"supports_audio_output": true,
|
||||
"supports_function_calling": true,
|
||||
"supports_native_streaming": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_prompt_caching": false,
|
||||
"supports_reasoning": false,
|
||||
"supports_response_schema": false,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true,
|
||||
"supports_vision": false
|
||||
},
|
||||
"gpt-audio-mini-2025-12-15": {
|
||||
"input_cost_per_audio_token": 1e-05,
|
||||
"input_cost_per_token": 6e-07,
|
||||
"litellm_provider": "openai",
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"max_tokens": 16384,
|
||||
"mode": "chat",
|
||||
"output_cost_per_audio_token": 2e-05,
|
||||
"output_cost_per_token": 2.4e-06,
|
||||
"supported_endpoints": [
|
||||
"/v1/chat/completions",
|
||||
"/v1/responses",
|
||||
"/v1/realtime",
|
||||
"/v1/batch"
|
||||
],
|
||||
"supported_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supported_output_modalities": [
|
||||
"text",
|
||||
"audio"
|
||||
],
|
||||
"supports_audio_input": true,
|
||||
"supports_audio_output": true,
|
||||
"supports_function_calling": true,
|
||||
"supports_native_streaming": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_prompt_caching": false,
|
||||
"supports_reasoning": false,
|
||||
"supports_response_schema": false,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true,
|
||||
"supports_vision": false
|
||||
},
|
||||
"gpt-4o-mini": {
|
||||
"cache_read_input_token_cost": 7.5e-08,
|
||||
"cache_read_input_token_cost_priority": 1.25e-07,
|
||||
|
||||
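The hunks above lower the audio-token rates for the gpt-4o-audio-preview models and add pricing entries for the new gpt-audio* models. As a rough illustration of how an entry like this is typically consumed, the TypeScript sketch below estimates a single request's cost from the per-token fields; the interfaces, the usage shape, and the function name are assumptions for illustration, not code from this repository.

// Hypothetical sketch: estimate request cost from a pricing entry shaped like the JSON above.
// Field names match the synced data; everything else is assumed for illustration.
interface ModelPrice {
  input_cost_per_token: number;
  output_cost_per_token: number;
  input_cost_per_audio_token?: number;
  output_cost_per_audio_token?: number;
}

interface Usage {
  inputTextTokens: number;
  outputTextTokens: number;
  inputAudioTokens?: number;
  outputAudioTokens?: number;
}

function estimateCostUSD(price: ModelPrice, usage: Usage): number {
  // Text and audio tokens are billed at separate per-token rates.
  const textCost =
    usage.inputTextTokens * price.input_cost_per_token +
    usage.outputTextTokens * price.output_cost_per_token;
  const audioCost =
    (usage.inputAudioTokens ?? 0) * (price.input_cost_per_audio_token ?? 0) +
    (usage.outputAudioTokens ?? 0) * (price.output_cost_per_audio_token ?? 0);
  return textCost + audioCost;
}

// With the updated "gpt-4o-audio-preview" rates, 1,000 text-in, 500 text-out,
// 2,000 audio-in and 1,000 audio-out tokens come to
// 1000*2.5e-6 + 500*1e-5 + 2000*4e-5 + 1000*8e-5 = 0.1675 USD.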
@@ -1 +1 @@
-563b13fee55f8252c322fd50a00bb3b72d41a4aa049512990c74e896f56314b7
+a62115bcc51131b5938953206ac6dd7b5a5afdc57249f4cd5822900e5e46d85e
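The final hunk replaces a stored SHA-256 checksum, and the new value is the same hash that appears in the commit title, which suggests the sync job fingerprints the pricing file and records the digest alongside it. A minimal verification sketch, assuming Node.js and a hypothetical file path (the path is not taken from this commit):

// Hypothetical check: recompute the SHA-256 of the synced pricing file and compare it
// to the recorded digest. The file path below is an assumption for illustration.
import { createHash } from "crypto";
import { readFileSync } from "fs";

const data = readFileSync("data/model_prices.json"); // assumed location of the synced JSON
const digest = createHash("sha256").update(data).digest("hex");
console.log(digest); // expected: a62115bcc51131b5938953206ac6dd7b5a5afdc57249f4cd5822900e5e46d85e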