mirror of https://github.com/Wei-Shaw/claude-relay-service.git, synced 2026-01-23 09:38:02 +00:00
chore: sync model pricing data (c82b2fecf991ca70074dbfefd3b6e7f82bd0de3097699e289944906d81a213ee)
@@ -18309,6 +18309,21 @@
         "supports_reasoning": true,
         "supports_tool_choice": true
     },
+    "openrouter/deepseek/deepseek-v3.2-exp": {
+        "input_cost_per_token": 2e-07,
+        "input_cost_per_token_cache_hit": 2e-08,
+        "litellm_provider": "openrouter",
+        "max_input_tokens": 163840,
+        "max_output_tokens": 163840,
+        "max_tokens": 8192,
+        "mode": "chat",
+        "output_cost_per_token": 4e-07,
+        "supports_assistant_prefill": true,
+        "supports_function_calling": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": false,
+        "supports_tool_choice": true
+    },
     "openrouter/deepseek/deepseek-coder": {
         "input_cost_per_token": 1.4e-07,
         "litellm_provider": "openrouter",
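The entries in this file are LiteLLM-style pricing records: each cost field is USD per token, so a relay can estimate the spend for a request by multiplying token counts by these rates. The sketch below is an illustration only; the estimateCostUSD helper and the usage shape (inputTokens / cachedInputTokens / outputTokens) are assumptions, not code from claude-relay-service.

// Minimal sketch (not the project's actual code): turning a pricing entry
// like the deepseek-v3.2-exp record added above into a per-request USD cost.
interface PricingEntry {
  input_cost_per_token: number;
  input_cost_per_token_cache_hit?: number;
  output_cost_per_token: number;
}

interface Usage {
  inputTokens: number;        // non-cached prompt tokens
  cachedInputTokens: number;  // prompt tokens served from the provider cache
  outputTokens: number;
}

function estimateCostUSD(entry: PricingEntry, usage: Usage): number {
  // Cache hits are billed at the discounted rate when one is defined,
  // otherwise at the normal input rate.
  const cacheRate = entry.input_cost_per_token_cache_hit ?? entry.input_cost_per_token;
  return (
    usage.inputTokens * entry.input_cost_per_token +
    usage.cachedInputTokens * cacheRate +
    usage.outputTokens * entry.output_cost_per_token
  );
}

// Example with the deepseek-v3.2-exp rates from this commit:
// 10,000 fresh input tokens, 50,000 cached, 2,000 output tokens.
const deepseekV32Exp: PricingEntry = {
  input_cost_per_token: 2e-7,
  input_cost_per_token_cache_hit: 2e-8,
  output_cost_per_token: 4e-7,
};
console.log(estimateCostUSD(deepseekV32Exp, {
  inputTokens: 10_000,
  cachedInputTokens: 50_000,
  outputTokens: 2_000,
})); // 0.002 + 0.001 + 0.0008 ≈ 0.0038 USD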
@@ -18552,6 +18567,19 @@
         "output_cost_per_token": 1e-06,
         "supports_tool_choice": true
     },
+    "openrouter/minimax/minimax-m2": {
+        "input_cost_per_token": 2.55e-7,
+        "litellm_provider": "openrouter",
+        "max_input_tokens": 204800,
+        "max_output_tokens": 204800,
+        "max_tokens": 32768,
+        "mode": "chat",
+        "output_cost_per_token": 1.02e-6,
+        "supports_function_calling": true,
+        "supports_prompt_caching": false,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
     "openrouter/mistralai/mistral-7b-instruct": {
         "input_cost_per_token": 1.3e-07,
         "litellm_provider": "openrouter",
@@ -19023,15 +19051,16 @@
         "supports_vision": true
     },
     "openrouter/qwen/qwen3-coder": {
-        "input_cost_per_token": 1e-06,
+        "input_cost_per_token": 2.2e-7,
         "litellm_provider": "openrouter",
-        "max_input_tokens": 1000000,
-        "max_output_tokens": 1000000,
-        "max_tokens": 1000000,
+        "max_input_tokens": 262100,
+        "max_output_tokens": 262100,
+        "max_tokens": 262100,
         "mode": "chat",
-        "output_cost_per_token": 5e-06,
+        "output_cost_per_token": 9.5e-7,
         "source": "https://openrouter.ai/qwen/qwen3-coder",
-        "supports_tool_choice": true
+        "supports_tool_choice": true,
+        "supports_function_calling": true
     },
     "openrouter/switchpoint/router": {
         "input_cost_per_token": 8.5e-07,
@@ -19080,6 +19109,32 @@
         "supports_tool_choice": true,
         "supports_web_search": false
     },
+    "openrouter/z-ai/glm-4.6": {
+        "input_cost_per_token": 4.0e-7,
+        "litellm_provider": "openrouter",
+        "max_input_tokens": 202800,
+        "max_output_tokens": 131000,
+        "max_tokens": 202800,
+        "mode": "chat",
+        "output_cost_per_token": 1.75e-6,
+        "source": "https://openrouter.ai/z-ai/glm-4.6",
+        "supports_function_calling": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/z-ai/glm-4.6:exacto": {
+        "input_cost_per_token": 4.5e-7,
+        "litellm_provider": "openrouter",
+        "max_input_tokens": 202800,
+        "max_output_tokens": 131000,
+        "max_tokens": 202800,
+        "mode": "chat",
+        "output_cost_per_token": 1.9e-6,
+        "source": "https://openrouter.ai/z-ai/glm-4.6:exacto",
+        "supports_function_calling": true,
+        "supports_reasoning": true,
+        "supports_tool_choice": true
+    },
     "ovhcloud/DeepSeek-R1-Distill-Llama-70B": {
         "input_cost_per_token": 6.7e-07,
         "litellm_provider": "ovhcloud",
@@ -24059,7 +24114,6 @@
         "supports_parallel_function_calling": false,
         "supports_vision": false
     },
-
     "whisper-1": {
         "input_cost_per_second": 0.0001,
         "litellm_provider": "openai",
@@ -24069,30 +24123,6 @@
             "/v1/audio/transcriptions"
         ]
     },
-    "vertex_ai/qwen/qwen3-next-80b-a3b-instruct-maas": {
-        "input_cost_per_token": 1.5e-07,
-        "litellm_provider": "vertex_ai-qwen_models",
-        "max_input_tokens": 262144,
-        "max_output_tokens": 262144,
-        "max_tokens": 262144,
-        "mode": "chat",
-        "output_cost_per_token": 1.2e-06,
-        "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
-        "supports_function_calling": true,
-        "supports_tool_choice": true
-    },
-    "vertex_ai/qwen/qwen3-next-80b-a3b-thinking-maas": {
-        "input_cost_per_token": 1.5e-07,
-        "litellm_provider": "vertex_ai-qwen_models",
-        "max_input_tokens": 262144,
-        "max_output_tokens": 262144,
-        "max_tokens": 262144,
-        "mode": "chat",
-        "output_cost_per_token": 1.2e-06,
-        "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
-        "supports_function_calling": true,
-        "supports_tool_choice": true
-    },
     "xai/grok-2": {
         "input_cost_per_token": 2e-06,
         "litellm_provider": "xai",
@@ -1 +1 @@
-520b582c460b52ee3dd2cce6a2936df47907ff2e47b92bee2c2cbc060a43b47b
+c82b2fecf991ca70074dbfefd3b6e7f82bd0de3097699e289944906d81a213ee
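The final hunk replaces a single 64-character hex line, and the new value matches the hash quoted in the commit title, which suggests it is a SHA-256 checksum of the synced pricing data. A minimal verification sketch, assuming hypothetical file names (model_prices.json, model_prices.sha256) rather than the repository's real paths:

// Hedged sketch: verify the synced pricing file against the one-line
// checksum updated in the second hunk. Paths are placeholders.
import { createHash } from "node:crypto";
import { readFileSync } from "node:fs";

const data = readFileSync("model_prices.json");                       // hypothetical path
const expected = readFileSync("model_prices.sha256", "utf8").trim();  // hypothetical path

const actual = createHash("sha256").update(data).digest("hex");
if (actual !== expected) {
  throw new Error(`pricing data checksum mismatch: ${actual} != ${expected}`);
}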