Compare commits

...

65 Commits

Author SHA1 Message Date
CaIon
6ca4ff503e feat: remove unnecessary section for screenshots in bug report templates 2026-03-14 15:50:42 +08:00
CaIon
dea9353842 feat: update issue and feature request templates to include documentation links and submission checks 2026-03-14 15:48:50 +08:00
CaIon
4deb1eb70f feat: update ratio label for user group handling in render component 2026-03-14 15:41:02 +08:00
CaIon
092ee07e94 feat: normalize number handling in model pricing editor #3246 2026-03-14 15:29:47 +08:00
CaIon
4e1b05e987 feat: add conditional setting for HTTP headers in OpenRouter channel type 2026-03-12 19:05:30 +08:00
CaIon
3bd1a167c9 feat: comment out notify endpoint in relay router 2026-03-12 19:05:30 +08:00
Seefs
7a0ff73c1b Merge pull request #3221 from RedwindA/chore/updateModelList
chore: update model lists for frequently used channels
2026-03-12 15:13:03 +08:00
CaIon
902725eb66 feat: update header title for OpenRouter channel type 2026-03-12 15:05:58 +08:00
RedwindA
cd1b3771bf chore: update model lists for frequently used channels 2026-03-11 23:39:18 +08:00
Calcium-Ion
122d5c00ef Merge pull request #3182 from seefs001/feature/params-override-beta-header-append
feat:support $keep_only_declared and deduped $append for header override
2026-03-10 02:03:02 +08:00
Seefs
c3b9ae5f3b refactor: optimize header override copy and JSON example dialog 2026-03-10 01:59:34 +08:00
CaIon
2c9b22153f feat: enhance Claude request header handling with append functionality 2026-03-09 23:47:51 +08:00
Calcium-Ion
80c09d7119 Merge pull request #3147 from pigletfly/compose-add-networks
fix: add explicit docker-compose networks
2026-03-09 22:19:56 +08:00
Calcium-Ion
b0d8b563c3 Merge pull request #3148 from feitianbubu/pr/d8a25d36204224f8a4248b0ab3b03ba703796ea3
fix: kling risk fail return openAIVideo error
2026-03-09 22:19:04 +08:00
Seefs
6ff5a5dc99 feat:support $keep_only_declared and deduped $append for header token overrides 2026-03-09 00:12:53 +08:00
CaIon
9bb2b6a6ae feat: implement token key fetching and masking in API responses 2026-03-08 22:40:40 +08:00
Calcium-Ion
c706a5c29a Merge pull request #3166 from somnifex/main
为渠道参数覆盖可视化规则提供拖拽排序支持
2026-03-07 15:02:35 +08:00
somnifex
9b394231c8 feat: add drag-and-drop functionality for operation reordering in ParamOverrideEditorModal 2026-03-07 14:10:06 +08:00
CaIon
05452c1558 feat: integrate site display type into pricing components
Add siteDisplayType prop across various pricing components to conditionally render pricing information based on the selected display type. This update enhances the user experience by ensuring that pricing details are accurately represented according to the chosen display mode, particularly for token-based views.
2026-03-07 00:23:36 +08:00
CaIon
f9b5ecc955 feat: add billing display mode selection and update pricing rendering
Introduce a billing display mode feature allowing users to toggle between price and ratio views. Update relevant components and hooks to support this new functionality, ensuring consistent pricing information is displayed across the application.
2026-03-06 23:35:17 +08:00
CaIon
d796578880 fix: unify pricing labels and expand marketplace pricing display
Keep the model pricing editor wording aligned with the new price-based UI while exposing cache, image, and audio pricing in the marketplace so users can see the full configured pricing model.
2026-03-06 22:33:51 +08:00
CaIon
3c71e0cd09 fix(video_proxy): update task retrieval to include user ID for improved context 2026-03-06 22:06:42 +08:00
CaIon
8186ed0ea5 fix: update language settings and improve model pricing editor for better clarity and functionality 2026-03-06 21:36:51 +08:00
CaIon
782124510a fix: update OpenAI request fields to use json.RawMessage for dynamic data handling 2026-03-06 19:10:42 +08:00
Calcium-Ion
4d421f525e Merge pull request #3151 from seefs001/feature/bad-responses-body-no-retry
fix(relay): skip retries for bad response body errors
2026-03-06 18:23:43 +08:00
Seefs
3cb0ca264f fix(relay): skip retries for bad response body errors 2026-03-06 18:22:25 +08:00
CaIon
aa455e7977 fix: update OpenAI response structure to use json.RawMessage for dynamic fields 2026-03-06 17:50:45 +08:00
feitianbubu
c3a96bc980 fix: kling risk fail return openAIVideo error 2026-03-06 16:32:52 +08:00
pigletfly
bb0d4b0f6d fix(compose): Add explicit bridge network 2026-03-06 15:44:47 +08:00
Calcium-Ion
e768ae44e1 Merge pull request #3141 from seefs001/fix/claude-thinking-top_p
fix: If top_p is not provided, Claude's logic will set to 1
2026-03-06 12:08:12 +08:00
Seefs
be8a623586 fix: ignore top_p 2026-03-06 12:07:36 +08:00
Seefs
a6ede75415 fix: ignore top_p 2026-03-06 12:07:00 +08:00
Seefs
267c99b779 fix: If top_p is not provided, Claude's logic will automatically set it to 1. 2026-03-06 12:03:51 +08:00
Calcium-Ion
44e59e1ced Merge pull request #2769 from feitianbubu/pr/3d0aaa75866f8d958a777a7e7ac8c1e4b5b3e537
feat: kling cost quota support use FinalUnitDeduction as totalToken
2026-03-06 11:46:59 +08:00
CaIon
18aa0de323 fix: add support for gpt-5.4 model in model_ratio.go 2026-03-06 11:43:05 +08:00
Seefs
f0e938a513 Merge pull request #3120 from nekohy/main
feats: repair the thinking of claude to openrouter convert
2026-03-05 18:10:46 +08:00
Seefs
db8243bb36 Merge pull request #3130 from feitianbubu/pr/ce221d98d71ab7aec3eb60f27ca33dbb4dc9610a
fix: fetch model add header passthrough rule key check
2026-03-05 18:09:44 +08:00
feitianbubu
1b85b183e6 fix: fetch model add header passthrough rule key check 2026-03-05 17:49:36 +08:00
Calcium-Ion
56c971691b Merge pull request #3129 from seefs001/feature/param-override-wildcard-path
Feature/param override wildcard path
2026-03-05 16:53:39 +08:00
Seefs
9d4ea49984 chore: remove top-right field guide entry in param override editor 2026-03-05 16:43:15 +08:00
Seefs
16e4ce52e3 feat: add wildcard path support and improve param override templates/editor 2026-03-05 16:39:34 +08:00
Nekohy
de12d6df05 delete some if 2026-03-05 06:24:22 +08:00
Nekohy
5b264f3a57 feats: repair the thinking of claude to openrouter convert 2026-03-05 06:12:48 +08:00
CaIon
887a929d65 fix: add multilingual support for meta description in index.html 2026-03-04 18:19:19 +08:00
Calcium-Ion
34262dc8c3 Merge pull request #3093 from feitianbubu/pr/92ad4854fcb501216dd9f2155c19f0556e4655bc
fix: update task billing log content to include reason
2026-03-04 18:13:59 +08:00
CaIon
ddffccc499 fix: update meta description for improved clarity and accuracy 2026-03-04 18:07:17 +08:00
CaIon
c31f9db61e feat: enhance PricingTags and SelectableButtonGroup with new badge styles and color variants 2026-03-04 00:36:04 +08:00
CaIon
3b65c32573 fix: improve error message for unsupported image generation models 2026-03-04 00:36:03 +08:00
Calcium-Ion
196f534c41 Merge pull request #3096 from seefs001/fix/auto-fetch-upstream-model-tips
Fix/auto fetch upstream model tips
2026-03-03 14:47:43 +08:00
Seefs
40c36b1a30 fix: count ignored models from unselected items in upstream update toast 2026-03-03 14:29:43 +08:00
Calcium-Ion
ae1c8e4173 fix: use default model price for radio price model (#3090) 2026-03-03 14:29:03 +08:00
Seefs
429b7428f4 fix: remove extra spaces 2026-03-03 14:08:43 +08:00
Seefs
0a804f0e70 fix: refine upstream update ignore UX and detect behavior 2026-03-03 14:00:48 +08:00
feitianbubu
5f3c5f14d4 fix: update task billing log content to include reason 2026-03-03 12:37:43 +08:00
feitianbubu
d12cc3a8da fix: use default model price for radio price model 2026-03-03 11:22:04 +08:00
Seefs
e71f5a45f2 feat: auto fetch upstream models (#2979)
* feat: add upstream model update detection with scheduled sync and manual apply flows

* feat: support upstream model removal sync and selectable deletes in update modal

* feat: add detect-only upstream updates and show compact +/- model badges

* feat: improve upstream model update UX

* feat: improve upstream model update UX

* fix: respect model_mapping in upstream update detection

* feat: improve upstream update modal to prevent missed add/remove actions

* feat: add admin upstream model update notifications with digest and truncation

* fix: avoid repeated partial-submit confirmation in upstream update modal

* feat: improve ui/ux

* feat: suppress upstream update alerts for unchanged channel-count within 24h

* fix: submit upstream update choices even when no models are selected

* feat: improve upstream model update flow and split frontend updater

* fix merge conflict
2026-03-02 22:01:53 +08:00
Calcium-Ion
d36f4205a9 Merge pull request #3081 from BenLampson/main
Return error when model price/ratio unset
2026-03-02 22:01:21 +08:00
Calcium-Ion
e593c11eab Merge pull request #3037 from RedwindA/fix/token-model-limits-length
fix: change token model_limits column from varchar(1024) to text
2026-03-02 22:00:21 +08:00
CaIon
477e9cf7db feat: add AionUI to chat settings and built-in templates 2026-03-02 21:19:04 +08:00
Calcium-Ion
1d3dcc0afa Merge pull request #3083 from QuantumNous/revert-3077-fix/aws-non-empty-text
Revert "fix: aws text content blocks must be non-empty"
2026-03-02 19:43:28 +08:00
Fat Person
4b439ad3be Return error when model price/ratio unset
#3079
Change ModelPriceHelperPerCall to return (PriceData, error) and stop silently falling back to a default price. If a model price is not configured the helper now returns an error (unless the user has AcceptUnsetRatioModel enabled and a ratio exists). Propagate this error to callers: Midjourney handlers now return a MidjourneyResponse with Code 4 and the error message, and task submission returns a wrapped task error with HTTP 400. Also extract remix video_id in ResolveOriginTask for remix actions. This enforces explicit model price/ratio configuration and surfaces configuration issues to clients.
2026-03-02 19:09:48 +08:00
RedwindA
43e068c0c0 fix: enhance migrateTokenModelLimitsToText function to return errors and improve migration checks 2026-02-28 19:08:03 +08:00
RedwindA
52c29e7582 fix: migrate model_limits column from varchar(1024) to text for existing tables 2026-02-28 18:49:06 +08:00
RedwindA
db8534b4a3 fix: change token model_limits column from varchar(1024) to text
Fixes #3033 — users with many model limits hit PostgreSQL's varchar
length constraint. The text type is supported across all three
databases (SQLite, MySQL, PostgreSQL) with no length restriction.
2026-02-27 14:47:20 +08:00
feitianbubu
1a8567758f feat: kling cost quota support use FinalUnitDeduction as totalToken 2026-01-28 18:42:13 +08:00
128 changed files with 8636 additions and 5654 deletions

View File

@@ -7,14 +7,23 @@ assignees: ''
---
**例行检查**
## 提交前必读(请勿删除本节)
- 文档:https://docs.newapi.ai/
- 使用问题先看或先问:https://deepwiki.com/QuantumNous/new-api
- 警告:删除本模板、删除小节标题或随意清空内容的 issue,可能会被直接关闭;重复恶意提交者可能会被 block。
**您当前的 newapi 版本**
请填写,例如:`v1.0.0`
**提交确认**
[//]: # (方框内删除已有的空格,填 x 号)
+ [ ] 我已确认目前没有类似 issue
+ [ ] 我已确认我已升级到最新版本
+ [ ] 我已完整查看过项目 README尤其是常见问题部分
+ [ ] 我理解并愿意跟进此 issue协助测试和提供反馈
+ [ ] 我理解并认可上述内容,并理解项目维护者精力有限,**不遵循规则的 issue 可能会被无视或直接关闭**
+ [ ] 我已完整查看过文档 https://docs.newapi.ai/ 和项目 README尤其是常见问题部分
+ [ ] 我未删除此模板中的任何引导内容或小节标题,并会按要求完整填写
+ [ ] 我理解项目维护者精力有限,不遵循模板要求的 issue 可能会被无视或直接关闭
**问题描述**
@@ -23,4 +32,3 @@ assignees: ''
**预期结果**
**相关截图**
如果没有的话,请删除此节。

View File

@@ -7,14 +7,23 @@ assignees: ''
---
**Routine Checks**
## Read This First (Do Not Remove This Section)
- Docs: https://docs.newapi.ai/
- Usage questions first: https://deepwiki.com/QuantumNous/new-api
- Warning: issues with this template removed, section headings deleted, or content cleared may be closed directly. Repeated abusive submissions may result in a block.
**Your current newapi version**
Please fill this in, for example: `v1.0.0`
**Submission Checks**
[//]: # (Remove the space in the box and fill with an x)
+ [ ] I have confirmed there are no similar issues currently
+ [ ] I have confirmed I have upgraded to the latest version
+ [ ] I have thoroughly read the project README, especially the FAQ section
+ [ ] I understand and am willing to follow up on this issue, assist with testing and provide feedback
+ [ ] I understand and acknowledge the above, and understand that project maintainers have limited time and energy, **issues that do not follow the rules may be ignored or closed directly**
+ [ ] I have confirmed there are no similar issues
+ [ ] I have thoroughly read the docs at https://docs.newapi.ai/ and the project README, especially the FAQ section
+ [ ] I have not removed any guidance or section headings from this template and will complete it as requested
+ [ ] I understand that maintainers have limited time and issues that do not follow this template may be ignored or closed directly
**Issue Description**
@@ -23,4 +32,3 @@ assignees: ''
**Expected Result**
**Related Screenshots**
If none, please delete this section.

View File

@@ -1,5 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: 项目群聊
url: https://private-user-images.githubusercontent.com/61247483/283011625-de536a8a-0161-47a7-a0a2-66ef6de81266.jpeg?jwt=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJnaXRodWIuY29tIiwiYXVkIjoicmF3LmdpdGh1YnVzZXJjb250ZW50LmNvbSIsImtleSI6ImtleTEiLCJleHAiOjE3MDIyMjQzOTAsIm5iZiI6MTcwMjIyNDA5MCwicGF0aCI6Ii82MTI0NzQ4My8yODMwMTE2MjUtZGU1MzZhOGEtMDE2MS00N2E3LWEwYTItNjZlZjZkZTgxMjY2LmpwZWc_WC1BbXotQWxnb3JpdGhtPUFXUzQtSE1BQy1TSEEyNTYmWC1BbXotQ3JlZGVudGlhbD1BS0lBSVdOSllBWDRDU1ZFSDUzQSUyRjIwMjMxMjEwJTJGdXMtZWFzdC0xJTJGczMlMkZhd3M0X3JlcXVlc3QmWC1BbXotRGF0ZT0yMDIzMTIxMFQxNjAxMzBaJlgtQW16LUV4cGlyZXM9MzAwJlgtQW16LVNpZ25hdHVyZT02MGIxYmM3ZDQyYzBkOTA2ZTYyYmVmMzQ1NjY4NjM1YjY0NTUzNTM5NjE1NDZkYTIzODdhYTk4ZjZjODJmYzY2JlgtQW16LVNpZ25lZEhlYWRlcnM9aG9zdCZhY3Rvcl9pZD0wJmtleV9pZD0wJnJlcG9faWQ9MCJ9.TJ8CTfOSwR0-CHS1KLfomqgL0e4YH1luy8lSLrkv5Zg
about: QQ 群:629454374
- name: 使用文档 / Documentation
url: https://docs.newapi.ai/
about: 提交 issue 前请先查阅文档,确认现有说明无法解决你的问题。
- name: 使用问题 / Usage Questions
url: https://deepwiki.com/QuantumNous/new-api
about: 使用、配置、接入等问题请优先在 DeepWiki 查询或提问。

View File

@@ -7,14 +7,23 @@ assignees: ''
---
**例行检查**
## 提交前必读(请勿删除本节)
- 文档:https://docs.newapi.ai/
- 使用问题先看或先问:https://deepwiki.com/QuantumNous/new-api
- 警告:删除本模板、删除小节标题或随意清空内容的 issue,可能会被直接关闭;重复恶意提交者可能会被 block。
**您当前的 newapi 版本**
请填写,例如:`v1.0.0`
**提交确认**
[//]: # (方框内删除已有的空格,填 x 号)
+ [ ] 我已确认目前没有类似 issue
+ [ ] 我已确认我已升级到最新版本
+ [ ] 我已完整查看过项目 README已确定现有版本无法满足需求
+ [ ] 我理解并愿意跟进此 issue协助测试和提供反馈
+ [ ] 我理解并认可上述内容,并理解项目维护者精力有限,**不遵循规则的 issue 可能会被无视或直接关闭**
+ [ ] 我已完整查看过文档 https://docs.newapi.ai/ 和项目 README已确定现有版本无法满足需求
+ [ ] 我未删除此模板中的任何引导内容或小节标题,并会按要求完整填写
+ [ ] 我理解项目维护者精力有限,不遵循模板要求的 issue 可能会被无视或直接关闭
**功能描述**

View File

@@ -7,16 +7,24 @@ assignees: ''
---
**Routine Checks**
## Read This First (Do Not Remove This Section)
- Docs: https://docs.newapi.ai/
- Usage questions first: https://deepwiki.com/QuantumNous/new-api
- Warning: issues with this template removed, section headings deleted, or content cleared may be closed directly. Repeated abusive submissions may result in a block.
**Your current newapi version**
Please fill this in, for example: `v1.0.0`
**Submission Checks**
[//]: # (Remove the space in the box and fill with an x)
+ [ ] I have confirmed there are no similar issues currently
+ [ ] I have confirmed I have upgraded to the latest version
+ [ ] I have thoroughly read the project README and confirmed the current version cannot meet my needs
+ [ ] I understand and am willing to follow up on this issue, assist with testing and provide feedback
+ [ ] I understand and acknowledge the above, and understand that project maintainers have limited time and energy, **issues that do not follow the rules may be ignored or closed directly**
+ [ ] I have confirmed there are no similar issues
+ [ ] I have thoroughly read the docs at https://docs.newapi.ai/ and the project README, and confirmed the current version cannot meet my needs
+ [ ] I have not removed any guidance or section headings from this template and will complete it as requested
+ [ ] I understand that maintainers have limited time and issues that do not follow this template may be ignored or closed directly
**Feature Description**
**Use Case**

2
.gitignore vendored
View File

@@ -1,6 +1,7 @@
.idea
.vscode
.zed
.history
upload
*.exe
*.db
@@ -20,6 +21,7 @@ tiktoken_cache
.cache
web/bun.lock
plans
.claude
electron/node_modules
electron/dist

View File

@@ -13,6 +13,7 @@ import (
"github.com/QuantumNous/new-api/constant"
"github.com/QuantumNous/new-api/dto"
"github.com/QuantumNous/new-api/model"
relaychannel "github.com/QuantumNous/new-api/relay/channel"
"github.com/QuantumNous/new-api/relay/channel/gemini"
"github.com/QuantumNous/new-api/relay/channel/ollama"
"github.com/QuantumNous/new-api/service"
@@ -183,6 +184,9 @@ func buildFetchModelsHeaders(channel *model.Channel, key string) (http.Header, e
headerOverride := channel.GetHeaderOverride()
for k, v := range headerOverride {
if relaychannel.IsHeaderPassthroughRuleKey(k) {
continue
}
str, ok := v.(string)
if !ok {
return nil, fmt.Errorf("invalid header override for key %s", k)
@@ -209,157 +213,14 @@ func FetchUpstreamModels(c *gin.Context) {
return
}
baseURL := constant.ChannelBaseURLs[channel.Type]
if channel.GetBaseURL() != "" {
baseURL = channel.GetBaseURL()
}
// 对于 Ollama 渠道,使用特殊处理
if channel.Type == constant.ChannelTypeOllama {
key := strings.Split(channel.Key, "\n")[0]
models, err := ollama.FetchOllamaModels(baseURL, key)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": fmt.Sprintf("获取Ollama模型失败: %s", err.Error()),
})
return
}
result := OpenAIModelsResponse{
Data: make([]OpenAIModel, 0, len(models)),
}
for _, modelInfo := range models {
metadata := map[string]any{}
if modelInfo.Size > 0 {
metadata["size"] = modelInfo.Size
}
if modelInfo.Digest != "" {
metadata["digest"] = modelInfo.Digest
}
if modelInfo.ModifiedAt != "" {
metadata["modified_at"] = modelInfo.ModifiedAt
}
details := modelInfo.Details
if details.ParentModel != "" || details.Format != "" || details.Family != "" || len(details.Families) > 0 || details.ParameterSize != "" || details.QuantizationLevel != "" {
metadata["details"] = modelInfo.Details
}
if len(metadata) == 0 {
metadata = nil
}
result.Data = append(result.Data, OpenAIModel{
ID: modelInfo.Name,
Object: "model",
Created: 0,
OwnedBy: "ollama",
Metadata: metadata,
})
}
c.JSON(http.StatusOK, gin.H{
"success": true,
"data": result.Data,
})
return
}
// 对于 Gemini 渠道,使用特殊处理
if channel.Type == constant.ChannelTypeGemini {
// 获取用于请求的可用密钥(多密钥渠道优先使用启用状态的密钥)
key, _, apiErr := channel.GetNextEnabledKey()
if apiErr != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": fmt.Sprintf("获取渠道密钥失败: %s", apiErr.Error()),
})
return
}
key = strings.TrimSpace(key)
models, err := gemini.FetchGeminiModels(baseURL, key, channel.GetSetting().Proxy)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": fmt.Sprintf("获取Gemini模型失败: %s", err.Error()),
})
return
}
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",
"data": models,
})
return
}
var url string
switch channel.Type {
case constant.ChannelTypeAli:
url = fmt.Sprintf("%s/compatible-mode/v1/models", baseURL)
case constant.ChannelTypeZhipu_v4:
if plan, ok := constant.ChannelSpecialBases[baseURL]; ok && plan.OpenAIBaseURL != "" {
url = fmt.Sprintf("%s/models", plan.OpenAIBaseURL)
} else {
url = fmt.Sprintf("%s/api/paas/v4/models", baseURL)
}
case constant.ChannelTypeVolcEngine:
if plan, ok := constant.ChannelSpecialBases[baseURL]; ok && plan.OpenAIBaseURL != "" {
url = fmt.Sprintf("%s/v1/models", plan.OpenAIBaseURL)
} else {
url = fmt.Sprintf("%s/v1/models", baseURL)
}
case constant.ChannelTypeMoonshot:
if plan, ok := constant.ChannelSpecialBases[baseURL]; ok && plan.OpenAIBaseURL != "" {
url = fmt.Sprintf("%s/models", plan.OpenAIBaseURL)
} else {
url = fmt.Sprintf("%s/v1/models", baseURL)
}
default:
url = fmt.Sprintf("%s/v1/models", baseURL)
}
// 获取用于请求的可用密钥(多密钥渠道优先使用启用状态的密钥)
key, _, apiErr := channel.GetNextEnabledKey()
if apiErr != nil {
ids, err := fetchChannelUpstreamModelIDs(channel)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": fmt.Sprintf("获取渠道密钥失败: %s", apiErr.Error()),
"message": fmt.Sprintf("获取模型列表失败: %s", err.Error()),
})
return
}
key = strings.TrimSpace(key)
headers, err := buildFetchModelsHeaders(channel, key)
if err != nil {
common.ApiError(c, err)
return
}
body, err := GetResponseBody("GET", url, channel, headers)
if err != nil {
common.ApiError(c, err)
return
}
var result OpenAIModelsResponse
if err = json.Unmarshal(body, &result); err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": fmt.Sprintf("解析响应失败: %s", err.Error()),
})
return
}
var ids []string
for _, model := range result.Data {
id := model.ID
if channel.Type == constant.ChannelTypeGemini {
id = strings.TrimPrefix(id, "models/")
}
ids = append(ids, id)
}
c.JSON(http.StatusOK, gin.H{
"success": true,

View File

@@ -0,0 +1,975 @@
package controller
import (
"fmt"
"net/http"
"slices"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/QuantumNous/new-api/common"
"github.com/QuantumNous/new-api/constant"
"github.com/QuantumNous/new-api/dto"
"github.com/QuantumNous/new-api/model"
"github.com/QuantumNous/new-api/relay/channel/gemini"
"github.com/QuantumNous/new-api/relay/channel/ollama"
"github.com/QuantumNous/new-api/service"
"github.com/gin-gonic/gin"
"github.com/samber/lo"
)
const (
channelUpstreamModelUpdateTaskDefaultIntervalMinutes = 30
channelUpstreamModelUpdateTaskBatchSize = 100
channelUpstreamModelUpdateMinCheckIntervalSeconds = 300
channelUpstreamModelUpdateNotifySuppressWindowSeconds = 86400
channelUpstreamModelUpdateNotifyMaxChannelDetails = 8
channelUpstreamModelUpdateNotifyMaxModelDetails = 12
channelUpstreamModelUpdateNotifyMaxFailedChannelIDs = 10
)
var (
channelUpstreamModelUpdateTaskOnce sync.Once
channelUpstreamModelUpdateTaskRunning atomic.Bool
channelUpstreamModelUpdateNotifyState = struct {
sync.Mutex
lastNotifiedAt int64
lastChangedChannels int
lastFailedChannels int
}{}
)
type applyChannelUpstreamModelUpdatesRequest struct {
ID int `json:"id"`
AddModels []string `json:"add_models"`
RemoveModels []string `json:"remove_models"`
IgnoreModels []string `json:"ignore_models"`
}
type applyAllChannelUpstreamModelUpdatesResult struct {
ChannelID int `json:"channel_id"`
ChannelName string `json:"channel_name"`
AddedModels []string `json:"added_models"`
RemovedModels []string `json:"removed_models"`
RemainingModels []string `json:"remaining_models"`
RemainingRemoveModels []string `json:"remaining_remove_models"`
}
type detectChannelUpstreamModelUpdatesResult struct {
ChannelID int `json:"channel_id"`
ChannelName string `json:"channel_name"`
AddModels []string `json:"add_models"`
RemoveModels []string `json:"remove_models"`
LastCheckTime int64 `json:"last_check_time"`
AutoAddedModels int `json:"auto_added_models"`
}
type upstreamModelUpdateChannelSummary struct {
ChannelName string
AddCount int
RemoveCount int
}
func normalizeModelNames(models []string) []string {
return lo.Uniq(lo.FilterMap(models, func(model string, _ int) (string, bool) {
trimmed := strings.TrimSpace(model)
return trimmed, trimmed != ""
}))
}
func mergeModelNames(base []string, appended []string) []string {
merged := normalizeModelNames(base)
seen := make(map[string]struct{}, len(merged))
for _, model := range merged {
seen[model] = struct{}{}
}
for _, model := range normalizeModelNames(appended) {
if _, ok := seen[model]; ok {
continue
}
seen[model] = struct{}{}
merged = append(merged, model)
}
return merged
}
func subtractModelNames(base []string, removed []string) []string {
removeSet := make(map[string]struct{}, len(removed))
for _, model := range normalizeModelNames(removed) {
removeSet[model] = struct{}{}
}
return lo.Filter(normalizeModelNames(base), func(model string, _ int) bool {
_, ok := removeSet[model]
return !ok
})
}
func intersectModelNames(base []string, allowed []string) []string {
allowedSet := make(map[string]struct{}, len(allowed))
for _, model := range normalizeModelNames(allowed) {
allowedSet[model] = struct{}{}
}
return lo.Filter(normalizeModelNames(base), func(model string, _ int) bool {
_, ok := allowedSet[model]
return ok
})
}
func applySelectedModelChanges(originModels []string, addModels []string, removeModels []string) []string {
// Add wins when the same model appears in both selected lists.
normalizedAdd := normalizeModelNames(addModels)
normalizedRemove := subtractModelNames(normalizeModelNames(removeModels), normalizedAdd)
return subtractModelNames(mergeModelNames(originModels, normalizedAdd), normalizedRemove)
}
func normalizeChannelModelMapping(channel *model.Channel) map[string]string {
if channel == nil || channel.ModelMapping == nil {
return nil
}
rawMapping := strings.TrimSpace(*channel.ModelMapping)
if rawMapping == "" || rawMapping == "{}" {
return nil
}
parsed := make(map[string]string)
if err := common.UnmarshalJsonStr(rawMapping, &parsed); err != nil {
return nil
}
normalized := make(map[string]string, len(parsed))
for source, target := range parsed {
normalizedSource := strings.TrimSpace(source)
normalizedTarget := strings.TrimSpace(target)
if normalizedSource == "" || normalizedTarget == "" {
continue
}
normalized[normalizedSource] = normalizedTarget
}
if len(normalized) == 0 {
return nil
}
return normalized
}
func collectPendingUpstreamModelChangesFromModels(
localModels []string,
upstreamModels []string,
ignoredModels []string,
modelMapping map[string]string,
) (pendingAddModels []string, pendingRemoveModels []string) {
localSet := make(map[string]struct{})
localModels = normalizeModelNames(localModels)
upstreamModels = normalizeModelNames(upstreamModels)
for _, modelName := range localModels {
localSet[modelName] = struct{}{}
}
upstreamSet := make(map[string]struct{}, len(upstreamModels))
for _, modelName := range upstreamModels {
upstreamSet[modelName] = struct{}{}
}
ignoredSet := make(map[string]struct{})
for _, modelName := range normalizeModelNames(ignoredModels) {
ignoredSet[modelName] = struct{}{}
}
redirectSourceSet := make(map[string]struct{}, len(modelMapping))
redirectTargetSet := make(map[string]struct{}, len(modelMapping))
for source, target := range modelMapping {
redirectSourceSet[source] = struct{}{}
redirectTargetSet[target] = struct{}{}
}
coveredUpstreamSet := make(map[string]struct{}, len(localSet)+len(redirectTargetSet))
for modelName := range localSet {
coveredUpstreamSet[modelName] = struct{}{}
}
for modelName := range redirectTargetSet {
coveredUpstreamSet[modelName] = struct{}{}
}
pendingAdd := lo.Filter(upstreamModels, func(modelName string, _ int) bool {
if _, ok := coveredUpstreamSet[modelName]; ok {
return false
}
if _, ok := ignoredSet[modelName]; ok {
return false
}
return true
})
pendingRemove := lo.Filter(localModels, func(modelName string, _ int) bool {
// Redirect source models are virtual aliases and should not be removed
// only because they are absent from upstream model list.
if _, ok := redirectSourceSet[modelName]; ok {
return false
}
_, ok := upstreamSet[modelName]
return !ok
})
return normalizeModelNames(pendingAdd), normalizeModelNames(pendingRemove)
}
func collectPendingUpstreamModelChanges(channel *model.Channel, settings dto.ChannelOtherSettings) (pendingAddModels []string, pendingRemoveModels []string, err error) {
upstreamModels, err := fetchChannelUpstreamModelIDs(channel)
if err != nil {
return nil, nil, err
}
pendingAddModels, pendingRemoveModels = collectPendingUpstreamModelChangesFromModels(
channel.GetModels(),
upstreamModels,
settings.UpstreamModelUpdateIgnoredModels,
normalizeChannelModelMapping(channel),
)
return pendingAddModels, pendingRemoveModels, nil
}
func getUpstreamModelUpdateMinCheckIntervalSeconds() int64 {
interval := int64(common.GetEnvOrDefault(
"CHANNEL_UPSTREAM_MODEL_UPDATE_MIN_CHECK_INTERVAL_SECONDS",
channelUpstreamModelUpdateMinCheckIntervalSeconds,
))
if interval < 0 {
return channelUpstreamModelUpdateMinCheckIntervalSeconds
}
return interval
}
func fetchChannelUpstreamModelIDs(channel *model.Channel) ([]string, error) {
baseURL := constant.ChannelBaseURLs[channel.Type]
if channel.GetBaseURL() != "" {
baseURL = channel.GetBaseURL()
}
if channel.Type == constant.ChannelTypeOllama {
key := strings.TrimSpace(strings.Split(channel.Key, "\n")[0])
models, err := ollama.FetchOllamaModels(baseURL, key)
if err != nil {
return nil, err
}
return normalizeModelNames(lo.Map(models, func(item ollama.OllamaModel, _ int) string {
return item.Name
})), nil
}
if channel.Type == constant.ChannelTypeGemini {
key, _, apiErr := channel.GetNextEnabledKey()
if apiErr != nil {
return nil, fmt.Errorf("获取渠道密钥失败: %w", apiErr)
}
key = strings.TrimSpace(key)
models, err := gemini.FetchGeminiModels(baseURL, key, channel.GetSetting().Proxy)
if err != nil {
return nil, err
}
return normalizeModelNames(models), nil
}
var url string
switch channel.Type {
case constant.ChannelTypeAli:
url = fmt.Sprintf("%s/compatible-mode/v1/models", baseURL)
case constant.ChannelTypeZhipu_v4:
if plan, ok := constant.ChannelSpecialBases[baseURL]; ok && plan.OpenAIBaseURL != "" {
url = fmt.Sprintf("%s/models", plan.OpenAIBaseURL)
} else {
url = fmt.Sprintf("%s/api/paas/v4/models", baseURL)
}
case constant.ChannelTypeVolcEngine:
if plan, ok := constant.ChannelSpecialBases[baseURL]; ok && plan.OpenAIBaseURL != "" {
url = fmt.Sprintf("%s/v1/models", plan.OpenAIBaseURL)
} else {
url = fmt.Sprintf("%s/v1/models", baseURL)
}
case constant.ChannelTypeMoonshot:
if plan, ok := constant.ChannelSpecialBases[baseURL]; ok && plan.OpenAIBaseURL != "" {
url = fmt.Sprintf("%s/models", plan.OpenAIBaseURL)
} else {
url = fmt.Sprintf("%s/v1/models", baseURL)
}
default:
url = fmt.Sprintf("%s/v1/models", baseURL)
}
key, _, apiErr := channel.GetNextEnabledKey()
if apiErr != nil {
return nil, fmt.Errorf("获取渠道密钥失败: %w", apiErr)
}
key = strings.TrimSpace(key)
headers, err := buildFetchModelsHeaders(channel, key)
if err != nil {
return nil, err
}
body, err := GetResponseBody(http.MethodGet, url, channel, headers)
if err != nil {
return nil, err
}
var result OpenAIModelsResponse
if err := common.Unmarshal(body, &result); err != nil {
return nil, err
}
ids := lo.Map(result.Data, func(item OpenAIModel, _ int) string {
if channel.Type == constant.ChannelTypeGemini {
return strings.TrimPrefix(item.ID, "models/")
}
return item.ID
})
return normalizeModelNames(ids), nil
}
func updateChannelUpstreamModelSettings(channel *model.Channel, settings dto.ChannelOtherSettings, updateModels bool) error {
channel.SetOtherSettings(settings)
updates := map[string]interface{}{
"settings": channel.OtherSettings,
}
if updateModels {
updates["models"] = channel.Models
}
return model.DB.Model(&model.Channel{}).Where("id = ?", channel.Id).Updates(updates).Error
}
func checkAndPersistChannelUpstreamModelUpdates(
channel *model.Channel,
settings *dto.ChannelOtherSettings,
force bool,
allowAutoApply bool,
) (modelsChanged bool, autoAdded int, err error) {
now := common.GetTimestamp()
if !force {
minInterval := getUpstreamModelUpdateMinCheckIntervalSeconds()
if settings.UpstreamModelUpdateLastCheckTime > 0 &&
now-settings.UpstreamModelUpdateLastCheckTime < minInterval {
return false, 0, nil
}
}
pendingAddModels, pendingRemoveModels, fetchErr := collectPendingUpstreamModelChanges(channel, *settings)
settings.UpstreamModelUpdateLastCheckTime = now
if fetchErr != nil {
if err = updateChannelUpstreamModelSettings(channel, *settings, false); err != nil {
return false, 0, err
}
return false, 0, fetchErr
}
if allowAutoApply && settings.UpstreamModelUpdateAutoSyncEnabled && len(pendingAddModels) > 0 {
originModels := normalizeModelNames(channel.GetModels())
mergedModels := mergeModelNames(originModels, pendingAddModels)
if len(mergedModels) > len(originModels) {
channel.Models = strings.Join(mergedModels, ",")
autoAdded = len(mergedModels) - len(originModels)
modelsChanged = true
}
settings.UpstreamModelUpdateLastDetectedModels = []string{}
} else {
settings.UpstreamModelUpdateLastDetectedModels = pendingAddModels
}
settings.UpstreamModelUpdateLastRemovedModels = pendingRemoveModels
if err = updateChannelUpstreamModelSettings(channel, *settings, modelsChanged); err != nil {
return false, autoAdded, err
}
if modelsChanged {
if err = channel.UpdateAbilities(nil); err != nil {
return true, autoAdded, err
}
}
return modelsChanged, autoAdded, nil
}
// refreshChannelRuntimeCache rebuilds the in-memory channel cache (when
// memory caching is enabled) and resets the proxy client cache so new
// requests observe the latest channel data. A panic during the cache
// rebuild is logged and swallowed so the caller is never taken down.
func refreshChannelRuntimeCache() {
	if common.MemoryCacheEnabled {
		rebuild := func() {
			defer func() {
				if r := recover(); r != nil {
					common.SysLog(fmt.Sprintf("InitChannelCache panic: %v", r))
				}
			}()
			model.InitChannelCache()
		}
		rebuild()
	}
	service.ResetProxyClientCache()
}
// shouldSendUpstreamModelUpdateNotification reports whether a notification
// carrying the given (changed, failed) channel counts should be sent now.
// A repeat notification with identical counts within the suppression window
// is skipped; anything else is sent and refreshes the suppression state.
// The no-op case (both counts <= 0) returns true without touching state.
func shouldSendUpstreamModelUpdateNotification(now int64, changedChannels int, failedChannels int) bool {
	if changedChannels <= 0 && failedChannels <= 0 {
		return true
	}
	channelUpstreamModelUpdateNotifyState.Lock()
	defer channelUpstreamModelUpdateNotifyState.Unlock()
	// Suppress only when the last notification is both recent and carried the
	// exact same counts; any count change breaks suppression immediately.
	if channelUpstreamModelUpdateNotifyState.lastNotifiedAt > 0 &&
		now-channelUpstreamModelUpdateNotifyState.lastNotifiedAt < channelUpstreamModelUpdateNotifySuppressWindowSeconds &&
		channelUpstreamModelUpdateNotifyState.lastChangedChannels == changedChannels &&
		channelUpstreamModelUpdateNotifyState.lastFailedChannels == failedChannels {
		return false
	}
	channelUpstreamModelUpdateNotifyState.lastNotifiedAt = now
	channelUpstreamModelUpdateNotifyState.lastChangedChannels = changedChannels
	channelUpstreamModelUpdateNotifyState.lastFailedChannels = failedChannels
	return true
}
// buildUpstreamModelUpdateTaskNotificationContent renders the aggregated
// (Chinese-language) notification body for one scan run: an overall summary
// line followed by capped detail sections for changed channels, added and
// removed model samples, and failed channel IDs. Entries beyond each display
// cap are summarized as an "omitted" count.
func buildUpstreamModelUpdateTaskNotificationContent(
	checkedChannels int,
	changedChannels int,
	detectedAddModels int,
	detectedRemoveModels int,
	autoAddedModels int,
	failedChannelIDs []int,
	channelSummaries []upstreamModelUpdateChannelSummary,
	addModelSamples []string,
	removeModelSamples []string,
) string {
	var builder strings.Builder
	failedChannels := len(failedChannelIDs)
	// Overall summary line.
	builder.WriteString(fmt.Sprintf(
		"上游模型巡检摘要:检测渠道 %d 个,发现变更 %d 个,新增 %d 个,删除 %d 个,自动同步新增 %d 个,失败 %d 个。",
		checkedChannels,
		changedChannels,
		detectedAddModels,
		detectedRemoveModels,
		autoAddedModels,
		failedChannels,
	))
	// Per-channel change details, capped at the configured maximum.
	if len(channelSummaries) > 0 {
		displayCount := min(len(channelSummaries), channelUpstreamModelUpdateNotifyMaxChannelDetails)
		builder.WriteString(fmt.Sprintf("\n\n变更渠道明细展示 %d/%d", displayCount, len(channelSummaries)))
		for _, summary := range channelSummaries[:displayCount] {
			builder.WriteString(fmt.Sprintf("\n- %s (+%d / -%d)", summary.ChannelName, summary.AddCount, summary.RemoveCount))
		}
		if len(channelSummaries) > displayCount {
			builder.WriteString(fmt.Sprintf("\n- 其余 %d 个渠道已省略", len(channelSummaries)-displayCount))
		}
	}
	// Sample of newly added model names, deduplicated and capped.
	normalizedAddModelSamples := normalizeModelNames(addModelSamples)
	if len(normalizedAddModelSamples) > 0 {
		displayCount := min(len(normalizedAddModelSamples), channelUpstreamModelUpdateNotifyMaxModelDetails)
		builder.WriteString(fmt.Sprintf("\n\n新增模型示例展示 %d/%d%s",
			displayCount,
			len(normalizedAddModelSamples),
			strings.Join(normalizedAddModelSamples[:displayCount], ", "),
		))
		if len(normalizedAddModelSamples) > displayCount {
			builder.WriteString(fmt.Sprintf("(其余 %d 个已省略)", len(normalizedAddModelSamples)-displayCount))
		}
	}
	// Sample of removed model names, deduplicated and capped.
	normalizedRemoveModelSamples := normalizeModelNames(removeModelSamples)
	if len(normalizedRemoveModelSamples) > 0 {
		displayCount := min(len(normalizedRemoveModelSamples), channelUpstreamModelUpdateNotifyMaxModelDetails)
		builder.WriteString(fmt.Sprintf("\n\n删除模型示例展示 %d/%d%s",
			displayCount,
			len(normalizedRemoveModelSamples),
			strings.Join(normalizedRemoveModelSamples[:displayCount], ", "),
		))
		if len(normalizedRemoveModelSamples) > displayCount {
			builder.WriteString(fmt.Sprintf("(其余 %d 个已省略)", len(normalizedRemoveModelSamples)-displayCount))
		}
	}
	// IDs of channels whose check failed, capped.
	if failedChannels > 0 {
		displayCount := min(failedChannels, channelUpstreamModelUpdateNotifyMaxFailedChannelIDs)
		displayIDs := lo.Map(failedChannelIDs[:displayCount], func(channelID int, _ int) string {
			return fmt.Sprintf("%d", channelID)
		})
		builder.WriteString(fmt.Sprintf(
			"\n\n失败渠道 ID展示 %d/%d%s",
			displayCount,
			failedChannels,
			strings.Join(displayIDs, ", "),
		))
		if failedChannels > displayCount {
			builder.WriteString(fmt.Sprintf("(其余 %d 个已省略)", failedChannels-displayCount))
		}
	}
	return builder.String()
}
// runChannelUpstreamModelUpdateTaskOnce performs one full scan of all
// enabled channels (in ascending-ID batches), checks each opted-in channel
// for upstream model changes, refreshes runtime caches when any channel's
// model list changed, and sends an aggregated notification when anything
// changed or failed. A CAS guard ensures at most one scan runs at a time;
// concurrent invocations return immediately.
func runChannelUpstreamModelUpdateTaskOnce() {
	if !channelUpstreamModelUpdateTaskRunning.CompareAndSwap(false, true) {
		return
	}
	defer channelUpstreamModelUpdateTaskRunning.Store(false)
	checkedChannels := 0
	failedChannels := 0
	failedChannelIDs := make([]int, 0)
	changedChannels := 0
	detectedAddModels := 0
	detectedRemoveModels := 0
	autoAddedModels := 0
	channelSummaries := make([]upstreamModelUpdateChannelSummary, 0)
	addModelSamples := make([]string, 0)
	removeModelSamples := make([]string, 0)
	refreshNeeded := false
	lastID := 0
	for {
		// Keyset pagination via the shared helper (same query as the
		// Apply/Detect-all endpoints) instead of duplicating the SELECT here.
		channels, err := findEnabledChannelsAfterID(lastID, channelUpstreamModelUpdateTaskBatchSize)
		if err != nil {
			common.SysLog(fmt.Sprintf("upstream model update task query failed: %v", err))
			break
		}
		if len(channels) == 0 {
			break
		}
		lastID = channels[len(channels)-1].Id
		for _, channel := range channels {
			if channel == nil {
				continue
			}
			settings := channel.GetOtherSettings()
			if !settings.UpstreamModelUpdateCheckEnabled {
				continue
			}
			checkedChannels++
			// Non-forced check with auto-apply allowed: honors the per-channel
			// throttle and auto-sync preference.
			modelsChanged, autoAdded, err := checkAndPersistChannelUpstreamModelUpdates(channel, &settings, false, true)
			if err != nil {
				failedChannels++
				failedChannelIDs = append(failedChannelIDs, channel.Id)
				common.SysLog(fmt.Sprintf("upstream model update check failed: channel_id=%d channel_name=%s err=%v", channel.Id, channel.Name, err))
				continue
			}
			currentAddModels := normalizeModelNames(settings.UpstreamModelUpdateLastDetectedModels)
			currentRemoveModels := normalizeModelNames(settings.UpstreamModelUpdateLastRemovedModels)
			// Auto-added models are cleared from the pending list, so count them
			// separately to report the true number of detected additions.
			currentAddCount := len(currentAddModels) + autoAdded
			currentRemoveCount := len(currentRemoveModels)
			detectedAddModels += currentAddCount
			detectedRemoveModels += currentRemoveCount
			if currentAddCount > 0 || currentRemoveCount > 0 {
				changedChannels++
				channelSummaries = append(channelSummaries, upstreamModelUpdateChannelSummary{
					ChannelName: channel.Name,
					AddCount:    currentAddCount,
					RemoveCount: currentRemoveCount,
				})
			}
			addModelSamples = mergeModelNames(addModelSamples, currentAddModels)
			removeModelSamples = mergeModelNames(removeModelSamples, currentRemoveModels)
			if modelsChanged {
				refreshNeeded = true
			}
			autoAddedModels += autoAdded
			// Optional pacing between upstream requests.
			if common.RequestInterval > 0 {
				time.Sleep(common.RequestInterval)
			}
		}
		if len(channels) < channelUpstreamModelUpdateTaskBatchSize {
			break
		}
	}
	if refreshNeeded {
		refreshChannelRuntimeCache()
	}
	if checkedChannels > 0 || common.DebugEnabled {
		common.SysLog(fmt.Sprintf(
			"upstream model update task done: checked_channels=%d changed_channels=%d detected_add_models=%d detected_remove_models=%d failed_channels=%d auto_added_models=%d",
			checkedChannels,
			changedChannels,
			detectedAddModels,
			detectedRemoveModels,
			failedChannels,
			autoAddedModels,
		))
	}
	if changedChannels > 0 || failedChannels > 0 {
		now := common.GetTimestamp()
		// Deduplicate repeat notifications within the suppression window.
		if !shouldSendUpstreamModelUpdateNotification(now, changedChannels, failedChannels) {
			common.SysLog(fmt.Sprintf(
				"upstream model update notification skipped in 24h window: changed_channels=%d failed_channels=%d",
				changedChannels,
				failedChannels,
			))
			return
		}
		service.NotifyUpstreamModelUpdateWatchers(
			"上游模型巡检通知",
			buildUpstreamModelUpdateTaskNotificationContent(
				checkedChannels,
				changedChannels,
				detectedAddModels,
				detectedRemoveModels,
				autoAddedModels,
				failedChannelIDs,
				channelSummaries,
				addModelSamples,
				removeModelSamples,
			),
		)
	}
}
// StartChannelUpstreamModelUpdateTask launches the periodic upstream model
// scan. It is a no-op on non-master nodes, when disabled via the
// CHANNEL_UPSTREAM_MODEL_UPDATE_TASK_ENABLED env var, or on repeated calls
// (guarded by sync.Once). The interval is read from
// CHANNEL_UPSTREAM_MODEL_UPDATE_TASK_INTERVAL_MINUTES, falling back to the
// default when the configured value is below one minute.
func StartChannelUpstreamModelUpdateTask() {
	channelUpstreamModelUpdateTaskOnce.Do(func() {
		if !common.IsMasterNode {
			return
		}
		if !common.GetEnvOrDefaultBool("CHANNEL_UPSTREAM_MODEL_UPDATE_TASK_ENABLED", true) {
			common.SysLog("upstream model update task disabled by CHANNEL_UPSTREAM_MODEL_UPDATE_TASK_ENABLED")
			return
		}
		intervalMinutes := common.GetEnvOrDefault(
			"CHANNEL_UPSTREAM_MODEL_UPDATE_TASK_INTERVAL_MINUTES",
			channelUpstreamModelUpdateTaskDefaultIntervalMinutes,
		)
		if intervalMinutes < 1 {
			intervalMinutes = channelUpstreamModelUpdateTaskDefaultIntervalMinutes
		}
		interval := time.Duration(intervalMinutes) * time.Minute
		go func() {
			common.SysLog(fmt.Sprintf("upstream model update task started: interval=%s", interval))
			// Run once immediately, then on every tick.
			runChannelUpstreamModelUpdateTaskOnce()
			ticker := time.NewTicker(interval)
			defer ticker.Stop()
			for range ticker.C {
				runChannelUpstreamModelUpdateTaskOnce()
			}
		}()
	})
}
// ApplyChannelUpstreamModelUpdates is the HTTP handler that applies a
// user-selected subset of the pending upstream model changes
// (add / ignore / remove) to a single channel and returns the applied and
// remaining model lists along with the updated channel state.
func ApplyChannelUpstreamModelUpdates(c *gin.Context) {
	var req applyChannelUpstreamModelUpdatesRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		common.ApiError(c, err)
		return
	}
	if req.ID <= 0 {
		c.JSON(http.StatusOK, gin.H{
			"success": false,
			"message": "invalid channel id",
		})
		return
	}
	channel, err := model.GetChannelById(req.ID, true)
	if err != nil {
		common.ApiError(c, err)
		return
	}
	// Compute effective ignored models against the pre-apply settings, since
	// applyChannelUpstreamModelUpdates mutates the stored pending lists.
	beforeSettings := channel.GetOtherSettings()
	ignoredModels := intersectModelNames(req.IgnoreModels, beforeSettings.UpstreamModelUpdateLastDetectedModels)
	addedModels, removedModels, remainingModels, remainingRemoveModels, modelsChanged, err := applyChannelUpstreamModelUpdates(
		channel,
		req.AddModels,
		req.IgnoreModels,
		req.RemoveModels,
	)
	if err != nil {
		common.ApiError(c, err)
		return
	}
	if modelsChanged {
		refreshChannelRuntimeCache()
	}
	c.JSON(http.StatusOK, gin.H{
		"success": true,
		"message": "",
		"data": gin.H{
			"id":                      channel.Id,
			"added_models":            addedModels,
			"removed_models":          removedModels,
			"ignored_models":          ignoredModels,
			"remaining_models":        remainingModels,
			"remaining_remove_models": remainingRemoveModels,
			"models":                  channel.Models,
			"settings":                channel.OtherSettings,
		},
	})
}
// DetectChannelUpstreamModelUpdates is the HTTP handler that runs an
// immediate (forced, non-auto-applying) upstream model check for a single
// channel and returns the detected pending additions and removals.
func DetectChannelUpstreamModelUpdates(c *gin.Context) {
	var req applyChannelUpstreamModelUpdatesRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		common.ApiError(c, err)
		return
	}
	if req.ID <= 0 {
		c.JSON(http.StatusOK, gin.H{
			"success": false,
			"message": "invalid channel id",
		})
		return
	}
	channel, err := model.GetChannelById(req.ID, true)
	if err != nil {
		common.ApiError(c, err)
		return
	}
	settings := channel.GetOtherSettings()
	// force=true bypasses the interval throttle; allowAutoApply=false means a
	// manual detect never silently rewrites the channel's model list.
	modelsChanged, autoAdded, err := checkAndPersistChannelUpstreamModelUpdates(channel, &settings, true, false)
	if err != nil {
		common.ApiError(c, err)
		return
	}
	if modelsChanged {
		refreshChannelRuntimeCache()
	}
	c.JSON(http.StatusOK, gin.H{
		"success": true,
		"message": "",
		"data": detectChannelUpstreamModelUpdatesResult{
			ChannelID:       channel.Id,
			ChannelName:     channel.Name,
			AddModels:       normalizeModelNames(settings.UpstreamModelUpdateLastDetectedModels),
			RemoveModels:    normalizeModelNames(settings.UpstreamModelUpdateLastRemovedModels),
			LastCheckTime:   settings.UpstreamModelUpdateLastCheckTime,
			AutoAddedModels: autoAdded,
		},
	})
}
// applyChannelUpstreamModelUpdates applies the selected add/ignore/remove
// model choices to the channel and persists the resulting settings.
//
// All inputs are intersected against the currently pending lists, so callers
// cannot inject arbitrary model names. When a model appears in both the add
// and remove selections, the addition wins. Returns the effectively applied
// lists, what remains pending afterwards, and whether channel.Models changed.
func applyChannelUpstreamModelUpdates(
	channel *model.Channel,
	addModelsInput []string,
	ignoreModelsInput []string,
	removeModelsInput []string,
) (
	addedModels []string,
	removedModels []string,
	remainingModels []string,
	remainingRemoveModels []string,
	modelsChanged bool,
	err error,
) {
	settings := channel.GetOtherSettings()
	pendingAddModels := normalizeModelNames(settings.UpstreamModelUpdateLastDetectedModels)
	pendingRemoveModels := normalizeModelNames(settings.UpstreamModelUpdateLastRemovedModels)
	// Restrict every selection to what is actually pending.
	addModels := intersectModelNames(addModelsInput, pendingAddModels)
	ignoreModels := intersectModelNames(ignoreModelsInput, pendingAddModels)
	removeModels := intersectModelNames(removeModelsInput, pendingRemoveModels)
	// Add wins over remove when the same model is selected for both.
	removeModels = subtractModelNames(removeModels, addModels)
	originModels := normalizeModelNames(channel.GetModels())
	nextModels := applySelectedModelChanges(originModels, addModels, removeModels)
	modelsChanged = !slices.Equal(originModels, nextModels)
	if modelsChanged {
		channel.Models = strings.Join(nextModels, ",")
	}
	// Track ignored models, but un-ignore anything the user explicitly added.
	settings.UpstreamModelUpdateIgnoredModels = mergeModelNames(settings.UpstreamModelUpdateIgnoredModels, ignoreModels)
	if len(addModels) > 0 {
		settings.UpstreamModelUpdateIgnoredModels = subtractModelNames(settings.UpstreamModelUpdateIgnoredModels, addModels)
	}
	remainingModels = subtractModelNames(pendingAddModels, append(addModels, ignoreModels...))
	remainingRemoveModels = subtractModelNames(pendingRemoveModels, removeModels)
	settings.UpstreamModelUpdateLastDetectedModels = remainingModels
	settings.UpstreamModelUpdateLastRemovedModels = remainingRemoveModels
	settings.UpstreamModelUpdateLastCheckTime = common.GetTimestamp()
	if err := updateChannelUpstreamModelSettings(channel, settings, modelsChanged); err != nil {
		return nil, nil, nil, nil, false, err
	}
	if modelsChanged {
		// Keep the abilities table consistent with the new model list.
		if err := channel.UpdateAbilities(nil); err != nil {
			return addModels, removeModels, remainingModels, remainingRemoveModels, true, err
		}
	}
	return addModels, removeModels, remainingModels, remainingRemoveModels, modelsChanged, nil
}
// collectPendingApplyUpstreamModelChanges normalizes and returns the pending
// model additions and removals recorded on the channel settings.
func collectPendingApplyUpstreamModelChanges(settings dto.ChannelOtherSettings) (pendingAddModels []string, pendingRemoveModels []string) {
	pendingAddModels = normalizeModelNames(settings.UpstreamModelUpdateLastDetectedModels)
	pendingRemoveModels = normalizeModelNames(settings.UpstreamModelUpdateLastRemovedModels)
	return
}
// findEnabledChannelsAfterID returns up to batchSize enabled channels whose
// ID is greater than lastID, ordered by ascending ID (keyset pagination).
// Only the columns needed by the upstream-model update flow are selected.
func findEnabledChannelsAfterID(lastID int, batchSize int) ([]*model.Channel, error) {
	var channels []*model.Channel
	query := model.DB.
		Select("id", "name", "type", "key", "status", "base_url", "models", "settings", "setting", "other", "group", "priority", "weight", "tag", "channel_info", "header_override").
		Where("status = ?", common.ChannelStatusEnabled).
		Order("id asc").
		Limit(batchSize)
	if lastID > 0 {
		query = query.Where("id > ?", lastID)
	}
	return channels, query.Find(&channels).Error
}
// ApplyAllChannelUpstreamModelUpdates is the HTTP handler that applies ALL
// pending upstream model changes (every pending addition and removal) for
// every enabled channel that has upstream checking enabled, and reports the
// per-channel outcome plus aggregate counts.
func ApplyAllChannelUpstreamModelUpdates(c *gin.Context) {
	results := make([]applyAllChannelUpstreamModelUpdatesResult, 0)
	failed := make([]int, 0)
	refreshNeeded := false
	addedModelCount := 0
	removedModelCount := 0
	lastID := 0
	for {
		channels, err := findEnabledChannelsAfterID(lastID, channelUpstreamModelUpdateTaskBatchSize)
		if err != nil {
			common.ApiError(c, err)
			return
		}
		if len(channels) == 0 {
			break
		}
		lastID = channels[len(channels)-1].Id
		for _, channel := range channels {
			if channel == nil {
				continue
			}
			settings := channel.GetOtherSettings()
			if !settings.UpstreamModelUpdateCheckEnabled {
				continue
			}
			pendingAddModels, pendingRemoveModels := collectPendingApplyUpstreamModelChanges(settings)
			if len(pendingAddModels) == 0 && len(pendingRemoveModels) == 0 {
				continue
			}
			// Apply everything pending: all additions, no ignores, all removals.
			addedModels, removedModels, remainingModels, remainingRemoveModels, modelsChanged, err := applyChannelUpstreamModelUpdates(
				channel,
				pendingAddModels,
				nil,
				pendingRemoveModels,
			)
			if err != nil {
				failed = append(failed, channel.Id)
				continue
			}
			if modelsChanged {
				refreshNeeded = true
			}
			addedModelCount += len(addedModels)
			removedModelCount += len(removedModels)
			results = append(results, applyAllChannelUpstreamModelUpdatesResult{
				ChannelID:             channel.Id,
				ChannelName:           channel.Name,
				AddedModels:           addedModels,
				RemovedModels:         removedModels,
				RemainingModels:       remainingModels,
				RemainingRemoveModels: remainingRemoveModels,
			})
		}
		if len(channels) < channelUpstreamModelUpdateTaskBatchSize {
			break
		}
	}
	if refreshNeeded {
		refreshChannelRuntimeCache()
	}
	c.JSON(http.StatusOK, gin.H{
		"success": true,
		"message": "",
		"data": gin.H{
			"processed_channels": len(results),
			"added_models":       addedModelCount,
			"removed_models":     removedModelCount,
			"failed_channel_ids": failed,
			"results":            results,
		},
	})
}
// DetectAllChannelUpstreamModelUpdates is the HTTP handler that runs an
// immediate (forced, non-auto-applying) upstream model check for every
// enabled channel with upstream checking enabled, returning per-channel
// detection results and aggregate counts.
func DetectAllChannelUpstreamModelUpdates(c *gin.Context) {
	results := make([]detectChannelUpstreamModelUpdatesResult, 0)
	failed := make([]int, 0)
	detectedAddCount := 0
	detectedRemoveCount := 0
	refreshNeeded := false
	lastID := 0
	for {
		channels, err := findEnabledChannelsAfterID(lastID, channelUpstreamModelUpdateTaskBatchSize)
		if err != nil {
			common.ApiError(c, err)
			return
		}
		if len(channels) == 0 {
			break
		}
		lastID = channels[len(channels)-1].Id
		for _, channel := range channels {
			if channel == nil {
				continue
			}
			settings := channel.GetOtherSettings()
			if !settings.UpstreamModelUpdateCheckEnabled {
				continue
			}
			// force=true, allowAutoApply=false: manual detection never rewrites
			// the channel's model list on its own.
			modelsChanged, autoAdded, err := checkAndPersistChannelUpstreamModelUpdates(channel, &settings, true, false)
			if err != nil {
				failed = append(failed, channel.Id)
				continue
			}
			if modelsChanged {
				refreshNeeded = true
			}
			addModels := normalizeModelNames(settings.UpstreamModelUpdateLastDetectedModels)
			removeModels := normalizeModelNames(settings.UpstreamModelUpdateLastRemovedModels)
			detectedAddCount += len(addModels)
			detectedRemoveCount += len(removeModels)
			results = append(results, detectChannelUpstreamModelUpdatesResult{
				ChannelID:       channel.Id,
				ChannelName:     channel.Name,
				AddModels:       addModels,
				RemoveModels:    removeModels,
				LastCheckTime:   settings.UpstreamModelUpdateLastCheckTime,
				AutoAddedModels: autoAdded,
			})
		}
		if len(channels) < channelUpstreamModelUpdateTaskBatchSize {
			break
		}
	}
	if refreshNeeded {
		refreshChannelRuntimeCache()
	}
	c.JSON(http.StatusOK, gin.H{
		"success": true,
		"message": "",
		"data": gin.H{
			"processed_channels":       len(results),
			"failed_channel_ids":       failed,
			"detected_add_models":      detectedAddCount,
			"detected_remove_models":   detectedRemoveCount,
			"channel_detected_results": results,
		},
	})
}

View File

@@ -0,0 +1,167 @@
package controller
import (
"testing"
"github.com/QuantumNous/new-api/dto"
"github.com/QuantumNous/new-api/model"
"github.com/stretchr/testify/require"
)
// TestNormalizeModelNames verifies trimming, removal of blank entries, and
// de-duplication while preserving first-seen order.
func TestNormalizeModelNames(t *testing.T) {
	result := normalizeModelNames([]string{
		"  gpt-4o ",
		"",
		"gpt-4o",
		"gpt-4.1",
		" ",
	})
	require.Equal(t, []string{"gpt-4o", "gpt-4.1"}, result)
}
// TestMergeModelNames verifies that merging normalizes the second list and
// appends only names not already present, preserving order.
func TestMergeModelNames(t *testing.T) {
	result := mergeModelNames(
		[]string{"gpt-4o", "gpt-4.1"},
		[]string{"gpt-4.1", " gpt-4.1-mini ", "gpt-4o"},
	)
	require.Equal(t, []string{"gpt-4o", "gpt-4.1", "gpt-4.1-mini"}, result)
}
// TestSubtractModelNames verifies removal of listed names; names absent from
// the source list are ignored.
func TestSubtractModelNames(t *testing.T) {
	result := subtractModelNames(
		[]string{"gpt-4o", "gpt-4.1", "gpt-4.1-mini"},
		[]string{"gpt-4.1", "not-exists"},
	)
	require.Equal(t, []string{"gpt-4o", "gpt-4.1-mini"}, result)
}
// TestIntersectModelNames verifies the intersection keeps first-list order
// and drops duplicates and names missing from the second list.
func TestIntersectModelNames(t *testing.T) {
	result := intersectModelNames(
		[]string{"gpt-4o", "gpt-4.1", "gpt-4.1", "not-exists"},
		[]string{"gpt-4.1", "gpt-4o-mini", "gpt-4o"},
	)
	require.Equal(t, []string{"gpt-4o", "gpt-4.1"}, result)
}
// TestApplySelectedModelChanges verifies combined add/remove application and
// that an addition wins when the same model is selected for removal.
func TestApplySelectedModelChanges(t *testing.T) {
	t.Run("add and remove together", func(t *testing.T) {
		result := applySelectedModelChanges(
			[]string{"gpt-4o", "gpt-4.1", "claude-3"},
			[]string{"gpt-4.1-mini"},
			[]string{"claude-3"},
		)
		require.Equal(t, []string{"gpt-4o", "gpt-4.1", "gpt-4.1-mini"}, result)
	})
	t.Run("add wins when conflict with remove", func(t *testing.T) {
		result := applySelectedModelChanges(
			[]string{"gpt-4o"},
			[]string{"gpt-4.1"},
			[]string{"gpt-4.1"},
		)
		require.Equal(t, []string{"gpt-4o", "gpt-4.1"}, result)
	})
}
// TestCollectPendingApplyUpstreamModelChanges verifies the stored pending
// lists are normalized (trimmed, de-blanked, de-duplicated) on collection.
func TestCollectPendingApplyUpstreamModelChanges(t *testing.T) {
	settings := dto.ChannelOtherSettings{
		UpstreamModelUpdateLastDetectedModels: []string{" gpt-4o ", "gpt-4o", "gpt-4.1"},
		UpstreamModelUpdateLastRemovedModels:  []string{" old-model ", "", "old-model"},
	}
	pendingAddModels, pendingRemoveModels := collectPendingApplyUpstreamModelChanges(settings)
	require.Equal(t, []string{"gpt-4o", "gpt-4.1"}, pendingAddModels)
	require.Equal(t, []string{"old-model"}, pendingRemoveModels)
}
// TestNormalizeChannelModelMapping verifies the channel's model-mapping JSON
// is parsed with trimmed keys/values and that entries with a blank key or
// blank target are dropped.
func TestNormalizeChannelModelMapping(t *testing.T) {
	modelMapping := `{
  " alias-model ": " upstream-model ",
  "": "invalid",
  "invalid-target": ""
}`
	channel := &model.Channel{
		ModelMapping: &modelMapping,
	}
	result := normalizeChannelModelMapping(channel)
	require.Equal(t, map[string]string{
		"alias-model": "upstream-model",
	}, result)
}
// TestCollectPendingUpstreamModelChangesFromModels_WithModelMapping verifies
// that a locally mapped alias counts as covering its upstream target (so no
// addition is reported) and that local models with no upstream counterpart
// are reported as removals.
func TestCollectPendingUpstreamModelChangesFromModels_WithModelMapping(t *testing.T) {
	pendingAddModels, pendingRemoveModels := collectPendingUpstreamModelChangesFromModels(
		[]string{"alias-model", "gpt-4o", "stale-model"},
		[]string{"gpt-4o", "gpt-4.1", "mapped-target"},
		[]string{"gpt-4.1"},
		map[string]string{
			"alias-model": "mapped-target",
		},
	)
	require.Equal(t, []string{}, pendingAddModels)
	require.Equal(t, []string{"stale-model"}, pendingRemoveModels)
}
// TestBuildUpstreamModelUpdateTaskNotificationContent_OmitOverflowDetails
// verifies that each notification section truncates at its display cap and
// reports the number of omitted entries (channels, model samples, failed IDs).
func TestBuildUpstreamModelUpdateTaskNotificationContent_OmitOverflowDetails(t *testing.T) {
	channelSummaries := make([]upstreamModelUpdateChannelSummary, 0, 12)
	for i := 0; i < 12; i++ {
		channelSummaries = append(channelSummaries, upstreamModelUpdateChannelSummary{
			ChannelName: "channel-" + string(rune('A'+i)),
			AddCount:    i + 1,
			RemoveCount: i,
		})
	}
	content := buildUpstreamModelUpdateTaskNotificationContent(
		24,
		12,
		56,
		21,
		9,
		[]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12},
		channelSummaries,
		[]string{
			"gpt-4.1", "gpt-4.1-mini", "o3", "o4-mini", "gemini-2.5-pro", "claude-3.7-sonnet",
			"qwen-max", "deepseek-r1", "llama-3.3-70b", "mistral-large", "command-r-plus", "doubao-pro-32k",
			"hunyuan-large",
		},
		[]string{
			"gpt-3.5-turbo", "claude-2.1", "gemini-1.5-pro", "mixtral-8x7b", "qwen-plus", "glm-4",
			"yi-large", "moonshot-v1", "doubao-lite",
		},
	)
	require.Contains(t, content, "其余 4 个渠道已省略")
	require.Contains(t, content, "其余 1 个已省略")
	require.Contains(t, content, "失败渠道 ID展示 10/12")
	require.Contains(t, content, "其余 2 个已省略")
}
// TestShouldSendUpstreamModelUpdateNotification resets the shared suppression
// state and then verifies: identical counts inside the window are suppressed,
// any count change sends immediately, an expired window sends again, and the
// zero-count case always returns true.
func TestShouldSendUpstreamModelUpdateNotification(t *testing.T) {
	channelUpstreamModelUpdateNotifyState.Lock()
	channelUpstreamModelUpdateNotifyState.lastNotifiedAt = 0
	channelUpstreamModelUpdateNotifyState.lastChangedChannels = 0
	channelUpstreamModelUpdateNotifyState.lastFailedChannels = 0
	channelUpstreamModelUpdateNotifyState.Unlock()
	baseTime := int64(2000000)
	require.True(t, shouldSendUpstreamModelUpdateNotification(baseTime, 6, 0))
	require.False(t, shouldSendUpstreamModelUpdateNotification(baseTime+3600, 6, 0))
	require.True(t, shouldSendUpstreamModelUpdateNotification(baseTime+3600, 7, 0))
	require.False(t, shouldSendUpstreamModelUpdateNotification(baseTime+7200, 7, 0))
	require.True(t, shouldSendUpstreamModelUpdateNotification(baseTime+8000, 0, 3))
	require.False(t, shouldSendUpstreamModelUpdateNotification(baseTime+9000, 0, 3))
	require.True(t, shouldSendUpstreamModelUpdateNotification(baseTime+10000, 0, 4))
	require.True(t, shouldSendUpstreamModelUpdateNotification(baseTime+90000, 7, 0))
	require.True(t, shouldSendUpstreamModelUpdateNotification(baseTime+90001, 0, 0))
}

View File

@@ -1,7 +1,6 @@
package controller
import (
"encoding/json"
"fmt"
"net/http"
"strings"
@@ -17,10 +16,56 @@ import (
"github.com/gin-gonic/gin"
)
// completionRatioMetaOptionKeys lists the option keys whose JSON values are
// per-model ratio/price maps; the union of their keys yields the model names
// used to build the synthetic CompletionRatioMeta option.
var completionRatioMetaOptionKeys = []string{
	"ModelPrice",
	"ModelRatio",
	"CompletionRatio",
	"CacheRatio",
	"CreateCacheRatio",
	"ImageRatio",
	"AudioRatio",
	"AudioCompletionRatio",
}
// collectModelNamesFromOptionValue parses raw as a JSON object and records
// each top-level key into modelNames. Blank or unparseable input is ignored
// silently (best-effort collection).
func collectModelNamesFromOptionValue(raw string, modelNames map[string]struct{}) {
	if strings.TrimSpace(raw) == "" {
		return
	}
	parsed := map[string]any{}
	if err := common.UnmarshalJsonStr(raw, &parsed); err != nil {
		return
	}
	for name := range parsed {
		modelNames[name] = struct{}{}
	}
}
// buildCompletionRatioMetaValue collects every model name mentioned across
// the ratio/price option values and returns a JSON object mapping each model
// to its completion-ratio info. Falls back to "{}" if marshalling fails.
func buildCompletionRatioMetaValue(optionValues map[string]string) string {
	modelNames := make(map[string]struct{})
	for _, key := range completionRatioMetaOptionKeys {
		collectModelNamesFromOptionValue(optionValues[key], modelNames)
	}
	meta := make(map[string]ratio_setting.CompletionRatioInfo, len(modelNames))
	for modelName := range modelNames {
		meta[modelName] = ratio_setting.GetCompletionRatioInfo(modelName)
	}
	jsonBytes, err := common.Marshal(meta)
	if err != nil {
		return "{}"
	}
	return string(jsonBytes)
}
func GetOptions(c *gin.Context) {
var options []*model.Option
optionValues := make(map[string]string)
common.OptionMapRWMutex.Lock()
for k, v := range common.OptionMap {
value := common.Interface2String(v)
if strings.HasSuffix(k, "Token") ||
strings.HasSuffix(k, "Secret") ||
strings.HasSuffix(k, "Key") ||
@@ -30,10 +75,20 @@ func GetOptions(c *gin.Context) {
}
options = append(options, &model.Option{
Key: k,
Value: common.Interface2String(v),
Value: value,
})
for _, optionKey := range completionRatioMetaOptionKeys {
if optionKey == k {
optionValues[k] = value
break
}
}
}
common.OptionMapRWMutex.Unlock()
options = append(options, &model.Option{
Key: "CompletionRatioMeta",
Value: buildCompletionRatioMetaValue(optionValues),
})
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",
@@ -49,7 +104,7 @@ type OptionUpdateRequest struct {
func UpdateOption(c *gin.Context) {
var option OptionUpdateRequest
err := json.NewDecoder(c.Request.Body).Decode(&option)
err := common.DecodeJson(c.Request.Body, &option)
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{
"success": false,

View File

@@ -341,6 +341,9 @@ func shouldRetry(c *gin.Context, openaiErr *types.NewAPIError, retryTimes int) b
if code < 100 || code > 599 {
return true
}
if operation_setting.IsAlwaysSkipRetryCode(openaiErr.GetErrorCode()) {
return false
}
return operation_setting.ShouldRetryByStatusCode(code)
}

View File

@@ -14,6 +14,23 @@ import (
"github.com/gin-gonic/gin"
)
// buildMaskedTokenResponse returns a shallow copy of token whose Key field
// is replaced with its masked form, so raw keys never leave the API.
// A nil input yields nil.
func buildMaskedTokenResponse(token *model.Token) *model.Token {
	if token == nil {
		return nil
	}
	masked := *token
	masked.Key = token.GetMaskedKey()
	return &masked
}
// buildMaskedTokenResponses maps a token slice through
// buildMaskedTokenResponse, masking every key for API output.
func buildMaskedTokenResponses(tokens []*model.Token) []*model.Token {
	masked := make([]*model.Token, len(tokens))
	for i, token := range tokens {
		masked[i] = buildMaskedTokenResponse(token)
	}
	return masked
}
func GetAllTokens(c *gin.Context) {
userId := c.GetInt("id")
pageInfo := common.GetPageQuery(c)
@@ -24,9 +41,8 @@ func GetAllTokens(c *gin.Context) {
}
total, _ := model.CountUserTokens(userId)
pageInfo.SetTotal(int(total))
pageInfo.SetItems(tokens)
pageInfo.SetItems(buildMaskedTokenResponses(tokens))
common.ApiSuccess(c, pageInfo)
return
}
func SearchTokens(c *gin.Context) {
@@ -42,9 +58,8 @@ func SearchTokens(c *gin.Context) {
return
}
pageInfo.SetTotal(int(total))
pageInfo.SetItems(tokens)
pageInfo.SetItems(buildMaskedTokenResponses(tokens))
common.ApiSuccess(c, pageInfo)
return
}
func GetToken(c *gin.Context) {
@@ -59,12 +74,24 @@ func GetToken(c *gin.Context) {
common.ApiError(c, err)
return
}
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",
"data": token,
common.ApiSuccess(c, buildMaskedTokenResponse(token))
}
// GetTokenKey returns the full (unmasked) key of one of the caller's own
// tokens. Ownership is enforced by looking the token up with both the path
// id and the authenticated user id, so one user cannot read another's key.
// (Redundant trailing bare return removed, consistent with the bare-return
// cleanup in the other handlers of this file.)
func GetTokenKey(c *gin.Context) {
	id, err := strconv.Atoi(c.Param("id"))
	userId := c.GetInt("id")
	if err != nil {
		common.ApiError(c, err)
		return
	}
	token, err := model.GetTokenByIds(id, userId)
	if err != nil {
		common.ApiError(c, err)
		return
	}
	common.ApiSuccess(c, gin.H{
		"key": token.GetFullKey(),
	})
}
func GetTokenStatus(c *gin.Context) {
@@ -204,7 +231,6 @@ func AddToken(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func DeleteToken(c *gin.Context) {
@@ -219,7 +245,6 @@ func DeleteToken(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func UpdateToken(c *gin.Context) {
@@ -283,7 +308,7 @@ func UpdateToken(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",
"data": cleanToken,
"data": buildMaskedTokenResponse(cleanToken),
})
}

275
controller/token_test.go Normal file
View File

@@ -0,0 +1,275 @@
package controller
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"strconv"
"strings"
"testing"
"github.com/QuantumNous/new-api/common"
"github.com/QuantumNous/new-api/model"
"github.com/gin-gonic/gin"
"github.com/glebarez/sqlite"
"gorm.io/gorm"
)
// tokenAPIResponse mirrors the generic API envelope; Data is kept raw so
// each test can decode it into the shape it expects.
type tokenAPIResponse struct {
	Success bool            `json:"success"`
	Message string          `json:"message"`
	Data    json.RawMessage `json:"data"`
}

// tokenPageResponse mirrors the paginated token list payload.
type tokenPageResponse struct {
	Items []tokenResponseItem `json:"items"`
}

// tokenResponseItem holds the subset of token fields the tests assert on.
type tokenResponseItem struct {
	ID     int    `json:"id"`
	Name   string `json:"name"`
	Key    string `json:"key"`
	Status int    `json:"status"`
}
// setupTokenControllerTestDB wires an isolated in-memory SQLite database
// (named per test to avoid cross-test sharing) into the global model.DB /
// model.LOG_DB, migrates the token table, and registers cleanup.
func setupTokenControllerTestDB(t *testing.T) *gorm.DB {
	t.Helper()
	gin.SetMode(gin.TestMode)
	common.UsingSQLite = true
	common.UsingMySQL = false
	common.UsingPostgreSQL = false
	common.RedisEnabled = false
	// Unique shared-cache DSN per test name keeps connections within one test
	// on the same in-memory database.
	dsn := fmt.Sprintf("file:%s?mode=memory&cache=shared", strings.ReplaceAll(t.Name(), "/", "_"))
	db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
	if err != nil {
		t.Fatalf("failed to open sqlite db: %v", err)
	}
	model.DB = db
	model.LOG_DB = db
	if err := db.AutoMigrate(&model.Token{}); err != nil {
		t.Fatalf("failed to migrate token table: %v", err)
	}
	t.Cleanup(func() {
		sqlDB, err := db.DB()
		if err == nil {
			_ = sqlDB.Close()
		}
	})
	return db
}
// seedToken inserts an enabled, unlimited-quota token for the given user
// with the provided raw key and returns the persisted row.
func seedToken(t *testing.T, db *gorm.DB, userID int, name string, rawKey string) *model.Token {
	t.Helper()
	token := &model.Token{
		UserId:         userID,
		Name:           name,
		Key:            rawKey,
		Status:         common.TokenStatusEnabled,
		CreatedTime:    1,
		AccessedTime:   1,
		ExpiredTime:    -1,
		RemainQuota:    100,
		UnlimitedQuota: true,
		Group:          "default",
	}
	if err := db.Create(token).Error; err != nil {
		t.Fatalf("failed to create token: %v", err)
	}
	return token
}
// newAuthenticatedContext builds a gin test context with an optional JSON
// body and the authenticated user id pre-set (as the auth middleware would),
// returning the context and its response recorder.
func newAuthenticatedContext(t *testing.T, method string, target string, body any, userID int) (*gin.Context, *httptest.ResponseRecorder) {
	t.Helper()
	var requestBody *bytes.Reader
	if body != nil {
		payload, err := common.Marshal(body)
		if err != nil {
			t.Fatalf("failed to marshal request body: %v", err)
		}
		requestBody = bytes.NewReader(payload)
	} else {
		requestBody = bytes.NewReader(nil)
	}
	recorder := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(recorder)
	ctx.Request = httptest.NewRequest(method, target, requestBody)
	if body != nil {
		ctx.Request.Header.Set("Content-Type", "application/json")
	}
	// Mimic the auth middleware setting the caller's user id.
	ctx.Set("id", userID)
	return ctx, recorder
}
// decodeAPIResponse unmarshals the recorded body into the generic API
// envelope, failing the test on malformed JSON.
func decodeAPIResponse(t *testing.T, recorder *httptest.ResponseRecorder) tokenAPIResponse {
	t.Helper()
	var response tokenAPIResponse
	if err := common.Unmarshal(recorder.Body.Bytes(), &response); err != nil {
		t.Fatalf("failed to decode api response: %v", err)
	}
	return response
}
// TestGetAllTokensMasksKeyInResponse verifies the list endpoint returns only
// the caller's tokens, masks every key, and never leaks a raw key anywhere
// in the response body.
func TestGetAllTokensMasksKeyInResponse(t *testing.T) {
	db := setupTokenControllerTestDB(t)
	token := seedToken(t, db, 1, "list-token", "abcd1234efgh5678")
	// Another user's token must not appear in user 1's listing.
	seedToken(t, db, 2, "other-user-token", "zzzz1234yyyy5678")
	ctx, recorder := newAuthenticatedContext(t, http.MethodGet, "/api/token/?p=1&size=10", nil, 1)
	GetAllTokens(ctx)
	response := decodeAPIResponse(t, recorder)
	if !response.Success {
		t.Fatalf("expected success response, got message: %s", response.Message)
	}
	var page tokenPageResponse
	if err := common.Unmarshal(response.Data, &page); err != nil {
		t.Fatalf("failed to decode token page response: %v", err)
	}
	if len(page.Items) != 1 {
		t.Fatalf("expected exactly one token, got %d", len(page.Items))
	}
	if page.Items[0].Key != token.GetMaskedKey() {
		t.Fatalf("expected masked key %q, got %q", token.GetMaskedKey(), page.Items[0].Key)
	}
	if strings.Contains(recorder.Body.String(), token.Key) {
		t.Fatalf("list response leaked raw token key: %s", recorder.Body.String())
	}
}
// TestSearchTokensMasksKeyInResponse verifies the search endpoint masks the
// key of each result and does not leak the raw key in the body.
func TestSearchTokensMasksKeyInResponse(t *testing.T) {
	db := setupTokenControllerTestDB(t)
	token := seedToken(t, db, 1, "searchable-token", "ijkl1234mnop5678")
	ctx, recorder := newAuthenticatedContext(t, http.MethodGet, "/api/token/search?keyword=searchable-token&p=1&size=10", nil, 1)
	SearchTokens(ctx)
	response := decodeAPIResponse(t, recorder)
	if !response.Success {
		t.Fatalf("expected success response, got message: %s", response.Message)
	}
	var page tokenPageResponse
	if err := common.Unmarshal(response.Data, &page); err != nil {
		t.Fatalf("failed to decode search response: %v", err)
	}
	if len(page.Items) != 1 {
		t.Fatalf("expected exactly one search result, got %d", len(page.Items))
	}
	if page.Items[0].Key != token.GetMaskedKey() {
		t.Fatalf("expected masked search key %q, got %q", token.GetMaskedKey(), page.Items[0].Key)
	}
	if strings.Contains(recorder.Body.String(), token.Key) {
		t.Fatalf("search response leaked raw token key: %s", recorder.Body.String())
	}
}
// TestGetTokenMasksKeyInResponse verifies the single-token detail endpoint
// returns the masked key and does not leak the raw key.
func TestGetTokenMasksKeyInResponse(t *testing.T) {
	db := setupTokenControllerTestDB(t)
	token := seedToken(t, db, 1, "detail-token", "qrst1234uvwx5678")
	ctx, recorder := newAuthenticatedContext(t, http.MethodGet, "/api/token/"+strconv.Itoa(token.Id), nil, 1)
	ctx.Params = gin.Params{{Key: "id", Value: strconv.Itoa(token.Id)}}
	GetToken(ctx)
	response := decodeAPIResponse(t, recorder)
	if !response.Success {
		t.Fatalf("expected success response, got message: %s", response.Message)
	}
	var detail tokenResponseItem
	if err := common.Unmarshal(response.Data, &detail); err != nil {
		t.Fatalf("failed to decode token detail response: %v", err)
	}
	if detail.Key != token.GetMaskedKey() {
		t.Fatalf("expected masked detail key %q, got %q", token.GetMaskedKey(), detail.Key)
	}
	if strings.Contains(recorder.Body.String(), token.Key) {
		t.Fatalf("detail response leaked raw token key: %s", recorder.Body.String())
	}
}
// TestUpdateTokenMasksKeyInResponse verifies that the update endpoint echoes
// the token back with a masked key and does not leak the raw key in the body.
func TestUpdateTokenMasksKeyInResponse(t *testing.T) {
	db := setupTokenControllerTestDB(t)
	seeded := seedToken(t, db, 1, "editable-token", "yzab1234cdef5678")

	// A minimal but complete update payload targeting the seeded token.
	payload := map[string]any{
		"id":                   seeded.Id,
		"name":                 "updated-token",
		"expired_time":         -1,
		"remain_quota":         100,
		"unlimited_quota":      true,
		"model_limits_enabled": false,
		"model_limits":         "",
		"group":                "default",
		"cross_group_retry":    false,
	}

	c, rec := newAuthenticatedContext(t, http.MethodPut, "/api/token/", payload, 1)
	UpdateToken(c)

	resp := decodeAPIResponse(t, rec)
	if !resp.Success {
		t.Fatalf("expected success response, got message: %s", resp.Message)
	}

	var detail tokenResponseItem
	if err := common.Unmarshal(resp.Data, &detail); err != nil {
		t.Fatalf("failed to decode token update response: %v", err)
	}
	if masked := seeded.GetMaskedKey(); detail.Key != masked {
		t.Fatalf("expected masked update key %q, got %q", masked, detail.Key)
	}
	// The raw key must not appear anywhere in the serialized response.
	if body := rec.Body.String(); strings.Contains(body, seeded.Key) {
		t.Fatalf("update response leaked raw token key: %s", body)
	}
}
// TestGetTokenKeyRequiresOwnershipAndReturnsFullKey verifies two contracts of
// the key-reveal endpoint: the owning user receives the full (unmasked) key,
// while a different user is rejected and the raw key never appears in the
// rejection body.
func TestGetTokenKeyRequiresOwnershipAndReturnsFullKey(t *testing.T) {
	db := setupTokenControllerTestDB(t)
	seeded := seedToken(t, db, 1, "owned-token", "owner1234token5678")
	idStr := strconv.Itoa(seeded.Id)
	path := "/api/token/" + idStr + "/key"

	// Owner (user id 1) fetches the key and must receive the full value.
	ownerCtx, ownerRec := newAuthenticatedContext(t, http.MethodPost, path, nil, 1)
	ownerCtx.Params = gin.Params{{Key: "id", Value: idStr}}
	GetTokenKey(ownerCtx)
	ownerResp := decodeAPIResponse(t, ownerRec)
	if !ownerResp.Success {
		t.Fatalf("expected authorized key fetch to succeed, got message: %s", ownerResp.Message)
	}
	var keyData tokenKeyResponse
	if err := common.Unmarshal(ownerResp.Data, &keyData); err != nil {
		t.Fatalf("failed to decode token key response: %v", err)
	}
	if full := seeded.GetFullKey(); keyData.Key != full {
		t.Fatalf("expected full key %q, got %q", full, keyData.Key)
	}

	// A non-owner (user id 2) must be denied and must not see the raw key.
	otherCtx, otherRec := newAuthenticatedContext(t, http.MethodPost, path, nil, 2)
	otherCtx.Params = gin.Params{{Key: "id", Value: idStr}}
	GetTokenKey(otherCtx)
	if decodeAPIResponse(t, otherRec).Success {
		t.Fatalf("expected unauthorized key fetch to fail")
	}
	if body := otherRec.Body.String(); strings.Contains(body, seeded.Key) {
		t.Fatalf("unauthorized key response leaked raw token key: %s", body)
	}
}

View File

@@ -1032,17 +1032,18 @@ func TopUp(c *gin.Context) {
}
type UpdateUserSettingRequest struct {
QuotaWarningType string `json:"notify_type"`
QuotaWarningThreshold float64 `json:"quota_warning_threshold"`
WebhookUrl string `json:"webhook_url,omitempty"`
WebhookSecret string `json:"webhook_secret,omitempty"`
NotificationEmail string `json:"notification_email,omitempty"`
BarkUrl string `json:"bark_url,omitempty"`
GotifyUrl string `json:"gotify_url,omitempty"`
GotifyToken string `json:"gotify_token,omitempty"`
GotifyPriority int `json:"gotify_priority,omitempty"`
AcceptUnsetModelRatioModel bool `json:"accept_unset_model_ratio_model"`
RecordIpLog bool `json:"record_ip_log"`
QuotaWarningType string `json:"notify_type"`
QuotaWarningThreshold float64 `json:"quota_warning_threshold"`
WebhookUrl string `json:"webhook_url,omitempty"`
WebhookSecret string `json:"webhook_secret,omitempty"`
NotificationEmail string `json:"notification_email,omitempty"`
BarkUrl string `json:"bark_url,omitempty"`
GotifyUrl string `json:"gotify_url,omitempty"`
GotifyToken string `json:"gotify_token,omitempty"`
GotifyPriority int `json:"gotify_priority,omitempty"`
UpstreamModelUpdateNotifyEnabled *bool `json:"upstream_model_update_notify_enabled,omitempty"`
AcceptUnsetModelRatioModel bool `json:"accept_unset_model_ratio_model"`
RecordIpLog bool `json:"record_ip_log"`
}
func UpdateUserSetting(c *gin.Context) {
@@ -1132,13 +1133,19 @@ func UpdateUserSetting(c *gin.Context) {
common.ApiError(c, err)
return
}
existingSettings := user.GetSetting()
upstreamModelUpdateNotifyEnabled := existingSettings.UpstreamModelUpdateNotifyEnabled
if user.Role >= common.RoleAdminUser && req.UpstreamModelUpdateNotifyEnabled != nil {
upstreamModelUpdateNotifyEnabled = *req.UpstreamModelUpdateNotifyEnabled
}
// 构建设置
settings := dto.UserSetting{
NotifyType: req.QuotaWarningType,
QuotaWarningThreshold: req.QuotaWarningThreshold,
AcceptUnsetRatioModel: req.AcceptUnsetModelRatioModel,
RecordIpLog: req.RecordIpLog,
NotifyType: req.QuotaWarningType,
QuotaWarningThreshold: req.QuotaWarningThreshold,
UpstreamModelUpdateNotifyEnabled: upstreamModelUpdateNotifyEnabled,
AcceptUnsetRatioModel: req.AcceptUnsetModelRatioModel,
RecordIpLog: req.RecordIpLog,
}
// 如果是webhook类型,添加webhook相关设置

View File

@@ -35,7 +35,8 @@ func VideoProxy(c *gin.Context) {
return
}
task, exists, err := model.GetByOnlyTaskId(taskID)
userID := c.GetInt("id")
task, exists, err := model.GetByTaskId(userID, taskID)
if err != nil {
logger.LogError(c.Request.Context(), fmt.Sprintf("Failed to query task %s: %s", taskID, err.Error()))
videoProxyError(c, http.StatusInternalServerError, "server_error", "Failed to query task")

View File

@@ -43,6 +43,8 @@ services:
- redis
- postgres
# - mysql # Uncomment if using MySQL
networks:
- new-api-network
healthcheck:
test: ["CMD-SHELL", "wget -q -O - http://localhost:3000/api/status | grep -o '\"success\":\\s*true' || exit 1"]
interval: 30s
@@ -53,6 +55,8 @@ services:
image: redis:latest
container_name: redis
restart: always
networks:
- new-api-network
postgres:
image: postgres:15
@@ -64,6 +68,8 @@ services:
POSTGRES_DB: new-api
volumes:
- pg_data:/var/lib/postgresql/data
networks:
- new-api-network
# ports:
# - "5432:5432" # Uncomment if you need to access PostgreSQL from outside Docker
@@ -76,9 +82,15 @@ services:
# MYSQL_DATABASE: new-api
# volumes:
# - mysql_data:/var/lib/mysql
# networks:
# - new-api-network
# ports:
# - "3306:3306" # Uncomment if you need to access MySQL from outside Docker
volumes:
pg_data:
# mysql_data:
networks:
new-api-network:
driver: bridge

View File

@@ -24,16 +24,22 @@ const (
)
type ChannelOtherSettings struct {
AzureResponsesVersion string `json:"azure_responses_version,omitempty"`
VertexKeyType VertexKeyType `json:"vertex_key_type,omitempty"` // "json" or "api_key"
OpenRouterEnterprise *bool `json:"openrouter_enterprise,omitempty"`
ClaudeBetaQuery bool `json:"claude_beta_query,omitempty"` // Claude 渠道是否强制追加 ?beta=true
AllowServiceTier bool `json:"allow_service_tier,omitempty"` // 是否允许 service_tier 透传(默认过滤以避免额外计费)
AllowInferenceGeo bool `json:"allow_inference_geo,omitempty"` // 是否允许 inference_geo 透传(仅 Claude默认过滤以满足数据驻留合规
DisableStore bool `json:"disable_store,omitempty"` // 是否禁用 store 透传(默认允许透传,禁用后可能导致 Codex 无法使用
AllowSafetyIdentifier bool `json:"allow_safety_identifier,omitempty"` // 是否允许 safety_identifier 透传(默认过滤以保护用户隐私
AllowIncludeObfuscation bool `json:"allow_include_obfuscation,omitempty"` // 是否允许 stream_options.include_obfuscation 透传(默认过滤以避免关闭流混淆保护)
AwsKeyType AwsKeyType `json:"aws_key_type,omitempty"`
AzureResponsesVersion string `json:"azure_responses_version,omitempty"`
VertexKeyType VertexKeyType `json:"vertex_key_type,omitempty"` // "json" or "api_key"
OpenRouterEnterprise *bool `json:"openrouter_enterprise,omitempty"`
ClaudeBetaQuery bool `json:"claude_beta_query,omitempty"` // Claude 渠道是否强制追加 ?beta=true
AllowServiceTier bool `json:"allow_service_tier,omitempty"` // 是否允许 service_tier 透传(默认过滤以避免额外计费)
AllowInferenceGeo bool `json:"allow_inference_geo,omitempty"` // 是否允许 inference_geo 透传(仅 Claude默认过滤以满足数据驻留合规
AllowSafetyIdentifier bool `json:"allow_safety_identifier,omitempty"` // 是否允许 safety_identifier 透传(默认过滤以保护用户隐私
DisableStore bool `json:"disable_store,omitempty"` // 是否禁用 store 透传(默认允许透传,禁用后可能导致 Codex 无法使用
AllowIncludeObfuscation bool `json:"allow_include_obfuscation,omitempty"` // 是否允许 stream_options.include_obfuscation 透传(默认过滤以避免关闭流混淆保护)
AwsKeyType AwsKeyType `json:"aws_key_type,omitempty"`
UpstreamModelUpdateCheckEnabled bool `json:"upstream_model_update_check_enabled,omitempty"` // 是否检测上游模型更新
UpstreamModelUpdateAutoSyncEnabled bool `json:"upstream_model_update_auto_sync_enabled,omitempty"` // 是否自动同步上游模型更新
UpstreamModelUpdateLastCheckTime int64 `json:"upstream_model_update_last_check_time,omitempty"` // 上次检测时间
UpstreamModelUpdateLastDetectedModels []string `json:"upstream_model_update_last_detected_models,omitempty"` // 上次检测到的可加入模型
UpstreamModelUpdateLastRemovedModels []string `json:"upstream_model_update_last_removed_models,omitempty"` // 上次检测到的可删除模型
UpstreamModelUpdateIgnoredModels []string `json:"upstream_model_update_ignored_models,omitempty"` // 手动忽略的模型
}
func (s *ChannelOtherSettings) IsOpenRouterEnterprise() bool {

View File

@@ -218,6 +218,11 @@ type ClaudeRequest struct {
ServiceTier string `json:"service_tier,omitempty"`
}
// OutputConfigForEffort just for extract effort
type OutputConfigForEffort struct {
Effort string `json:"effort,omitempty"`
}
// createClaudeFileSource 根据数据内容创建正确类型的 FileSource
func createClaudeFileSource(data string) *types.FileSource {
if strings.HasPrefix(data, "http://") || strings.HasPrefix(data, "https://") {
@@ -409,6 +414,15 @@ func (c *ClaudeRequest) GetTools() []any {
}
}
func (c *ClaudeRequest) GetEfforts() string {
var OutputConfig OutputConfigForEffort
if err := json.Unmarshal(c.OutputConfig, &OutputConfig); err == nil {
effort := OutputConfig.Effort
return effort
}
return ""
}
// ProcessTools 处理工具列表,支持类型断言
func ProcessTools(tools []any) ([]*Tool, []*ClaudeWebSearchTool) {
var normalTools []*Tool

View File

@@ -56,10 +56,10 @@ type GeneralOpenAIRequest struct {
Tools []ToolCallRequest `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
FunctionCall json.RawMessage `json:"function_call,omitempty"`
User string `json:"user,omitempty"`
User json.RawMessage `json:"user,omitempty"`
// ServiceTier specifies upstream service level and may affect billing.
// This field is filtered by default and can be enabled via channel setting allow_service_tier.
ServiceTier string `json:"service_tier,omitempty"`
ServiceTier json.RawMessage `json:"service_tier,omitempty"`
LogProbs *bool `json:"logprobs,omitempty"`
TopLogProbs *int `json:"top_logprobs,omitempty"`
Dimensions *int `json:"dimensions,omitempty"`
@@ -67,7 +67,7 @@ type GeneralOpenAIRequest struct {
Audio json.RawMessage `json:"audio,omitempty"`
// 安全标识符,用于帮助 OpenAI 检测可能违反使用政策的应用程序用户
// 注意:此字段会向 OpenAI 发送用户标识信息,默认过滤,可通过 allow_safety_identifier 开启
SafetyIdentifier string `json:"safety_identifier,omitempty"`
SafetyIdentifier json.RawMessage `json:"safety_identifier,omitempty"`
// Whether or not to store the output of this chat completion request for use in our model distillation or evals products.
// 是否存储此次请求数据供 OpenAI 用于评估和优化产品
// 注意:默认允许透传,可通过 disable_store 禁用;禁用后可能导致 Codex 无法正常使用
@@ -100,10 +100,10 @@ type GeneralOpenAIRequest struct {
THINKING json.RawMessage `json:"thinking,omitempty"`
// pplx Params
SearchDomainFilter json.RawMessage `json:"search_domain_filter,omitempty"`
SearchRecencyFilter string `json:"search_recency_filter,omitempty"`
SearchRecencyFilter json.RawMessage `json:"search_recency_filter,omitempty"`
ReturnImages *bool `json:"return_images,omitempty"`
ReturnRelatedQuestions *bool `json:"return_related_questions,omitempty"`
SearchMode string `json:"search_mode,omitempty"`
SearchMode json.RawMessage `json:"search_mode,omitempty"`
// Minimax
ReasoningSplit json.RawMessage `json:"reasoning_split,omitempty"`
}
@@ -836,7 +836,7 @@ type OpenAIResponsesRequest struct {
PromptCacheRetention json.RawMessage `json:"prompt_cache_retention,omitempty"`
// SafetyIdentifier carries client identity for policy abuse detection.
// This field is filtered by default and can be enabled via channel setting allow_safety_identifier.
SafetyIdentifier string `json:"safety_identifier,omitempty"`
SafetyIdentifier json.RawMessage `json:"safety_identifier,omitempty"`
Stream *bool `json:"stream,omitempty"`
StreamOptions *StreamOptions `json:"stream_options,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`
@@ -844,8 +844,8 @@ type OpenAIResponsesRequest struct {
ToolChoice json.RawMessage `json:"tool_choice,omitempty"`
Tools json.RawMessage `json:"tools,omitempty"` // 需要处理的参数很少MCP 参数太多不确定,所以用 map
TopP *float64 `json:"top_p,omitempty"`
Truncation string `json:"truncation,omitempty"`
User string `json:"user,omitempty"`
Truncation json.RawMessage `json:"truncation,omitempty"`
User json.RawMessage `json:"user,omitempty"`
MaxToolCalls *uint `json:"max_tool_calls,omitempty"`
Prompt json.RawMessage `json:"prompt,omitempty"`
// qwen

View File

@@ -267,7 +267,7 @@ type OpenAIResponsesResponse struct {
ID string `json:"id"`
Object string `json:"object"`
CreatedAt int `json:"created_at"`
Status string `json:"status"`
Status json.RawMessage `json:"status"`
Error any `json:"error,omitempty"`
IncompleteDetails *IncompleteDetails `json:"incomplete_details,omitempty"`
Instructions string `json:"instructions"`
@@ -275,14 +275,14 @@ type OpenAIResponsesResponse struct {
Model string `json:"model"`
Output []ResponsesOutput `json:"output"`
ParallelToolCalls bool `json:"parallel_tool_calls"`
PreviousResponseID string `json:"previous_response_id"`
PreviousResponseID json.RawMessage `json:"previous_response_id"`
Reasoning *Reasoning `json:"reasoning"`
Store bool `json:"store"`
Temperature float64 `json:"temperature"`
ToolChoice string `json:"tool_choice"`
ToolChoice json.RawMessage `json:"tool_choice"`
Tools []map[string]any `json:"tools"`
TopP float64 `json:"top_p"`
Truncation string `json:"truncation"`
Truncation json.RawMessage `json:"truncation"`
Usage *Usage `json:"usage"`
User json.RawMessage `json:"user"`
Metadata json.RawMessage `json:"metadata"`

View File

@@ -1,20 +1,21 @@
package dto
type UserSetting struct {
NotifyType string `json:"notify_type,omitempty"` // QuotaWarningType 额度预警类型
QuotaWarningThreshold float64 `json:"quota_warning_threshold,omitempty"` // QuotaWarningThreshold 额度预警阈值
WebhookUrl string `json:"webhook_url,omitempty"` // WebhookUrl webhook地址
WebhookSecret string `json:"webhook_secret,omitempty"` // WebhookSecret webhook密钥
NotificationEmail string `json:"notification_email,omitempty"` // NotificationEmail 通知邮箱地址
BarkUrl string `json:"bark_url,omitempty"` // BarkUrl Bark推送URL
GotifyUrl string `json:"gotify_url,omitempty"` // GotifyUrl Gotify服务器地址
GotifyToken string `json:"gotify_token,omitempty"` // GotifyToken Gotify应用令牌
GotifyPriority int `json:"gotify_priority"` // GotifyPriority Gotify消息优先级
AcceptUnsetRatioModel bool `json:"accept_unset_model_ratio_model,omitempty"` // AcceptUnsetRatioModel 是否接受未设置价格的模型
RecordIpLog bool `json:"record_ip_log,omitempty"` // 是否记录请求和错误日志IP
SidebarModules string `json:"sidebar_modules,omitempty"` // SidebarModules 左侧边栏模块配置
BillingPreference string `json:"billing_preference,omitempty"` // BillingPreference 扣费策略(订阅/钱包)
Language string `json:"language,omitempty"` // Language 用户语言偏好 (zh, en)
NotifyType string `json:"notify_type,omitempty"` // QuotaWarningType 额度预警类型
QuotaWarningThreshold float64 `json:"quota_warning_threshold,omitempty"` // QuotaWarningThreshold 额度预警阈值
WebhookUrl string `json:"webhook_url,omitempty"` // WebhookUrl webhook地址
WebhookSecret string `json:"webhook_secret,omitempty"` // WebhookSecret webhook密钥
NotificationEmail string `json:"notification_email,omitempty"` // NotificationEmail 通知邮箱地址
BarkUrl string `json:"bark_url,omitempty"` // BarkUrl Bark推送URL
GotifyUrl string `json:"gotify_url,omitempty"` // GotifyUrl Gotify服务器地址
GotifyToken string `json:"gotify_token,omitempty"` // GotifyToken Gotify应用令牌
GotifyPriority int `json:"gotify_priority"` // GotifyPriority Gotify消息优先级
UpstreamModelUpdateNotifyEnabled bool `json:"upstream_model_update_notify_enabled,omitempty"` // 是否接收上游模型更新定时检测通知(仅管理员)
AcceptUnsetRatioModel bool `json:"accept_unset_model_ratio_model,omitempty"` // AcceptUnsetRatioModel 是否接受未设置价格的模型
RecordIpLog bool `json:"record_ip_log,omitempty"` // 是否记录请求和错误日志IP
SidebarModules string `json:"sidebar_modules,omitempty"` // SidebarModules 左侧边栏模块配置
BillingPreference string `json:"billing_preference,omitempty"` // BillingPreference 扣费策略(订阅/钱包)
Language string `json:"language,omitempty"` // Language 用户语言偏好 (zh, en)
}
var (

View File

@@ -121,6 +121,9 @@ func main() {
return a
}
// Channel upstream model update check task
controller.StartChannelUpstreamModelUpdateTask()
if common.IsMasterNode && constant.UpdateTask {
gopool.Go(func() {
controller.UpdateMidjourneyTaskBulk()

View File

@@ -250,6 +250,10 @@ func InitLogDB() (err error) {
func migrateDB() error {
// Migrate price_amount column from float/double to decimal for existing tables
migrateSubscriptionPlanPriceAmount()
// Migrate model_limits column from varchar to text for existing tables
if err := migrateTokenModelLimitsToText(); err != nil {
return err
}
err := DB.AutoMigrate(
&Channel{},
@@ -445,6 +449,59 @@ PRIMARY KEY (` + "`id`" + `)
return nil
}
// migrateTokenModelLimitsToText migrates model_limits column from varchar(1024) to text
// This is safe to run multiple times - it checks the column type first
func migrateTokenModelLimitsToText() error {
// SQLite uses type affinity, so TEXT and VARCHAR are effectively the same — no migration needed
if common.UsingSQLite {
return nil
}
tableName := "tokens"
columnName := "model_limits"
if !DB.Migrator().HasTable(tableName) {
return nil
}
if !DB.Migrator().HasColumn(&Token{}, columnName) {
return nil
}
var alterSQL string
if common.UsingPostgreSQL {
var dataType string
if err := DB.Raw(`SELECT data_type FROM information_schema.columns
WHERE table_schema = current_schema() AND table_name = ? AND column_name = ?`,
tableName, columnName).Scan(&dataType).Error; err != nil {
common.SysLog(fmt.Sprintf("Warning: failed to query metadata for %s.%s: %v", tableName, columnName, err))
} else if dataType == "text" {
return nil
}
alterSQL = fmt.Sprintf(`ALTER TABLE %s ALTER COLUMN %s TYPE text`, tableName, columnName)
} else if common.UsingMySQL {
var columnType string
if err := DB.Raw(`SELECT COLUMN_TYPE FROM information_schema.columns
WHERE table_schema = DATABASE() AND table_name = ? AND column_name = ?`,
tableName, columnName).Scan(&columnType).Error; err != nil {
common.SysLog(fmt.Sprintf("Warning: failed to query metadata for %s.%s: %v", tableName, columnName, err))
} else if strings.ToLower(columnType) == "text" {
return nil
}
alterSQL = fmt.Sprintf("ALTER TABLE %s MODIFY COLUMN %s text", tableName, columnName)
} else {
return nil
}
if alterSQL != "" {
if err := DB.Exec(alterSQL).Error; err != nil {
return fmt.Errorf("failed to migrate %s.%s to text: %w", tableName, columnName, err)
}
common.SysLog(fmt.Sprintf("Successfully migrated %s.%s to text", tableName, columnName))
}
return nil
}
// migrateSubscriptionPlanPriceAmount migrates price_amount column from float/double to decimal(10,6)
// This is safe to run multiple times - it checks the column type first
func migrateSubscriptionPlanPriceAmount() {
@@ -471,9 +528,11 @@ func migrateSubscriptionPlanPriceAmount() {
if common.UsingPostgreSQL {
// PostgreSQL: Check if already decimal/numeric
var dataType string
DB.Raw(`SELECT data_type FROM information_schema.columns
WHERE table_name = ? AND column_name = ?`, tableName, columnName).Scan(&dataType)
if dataType == "numeric" {
if err := DB.Raw(`SELECT data_type FROM information_schema.columns
WHERE table_schema = current_schema() AND table_name = ? AND column_name = ?`,
tableName, columnName).Scan(&dataType).Error; err != nil {
common.SysLog(fmt.Sprintf("Warning: failed to query metadata for %s.%s: %v", tableName, columnName, err))
} else if dataType == "numeric" {
return // Already decimal/numeric
}
alterSQL = fmt.Sprintf(`ALTER TABLE %s ALTER COLUMN %s TYPE decimal(10,6) USING %s::decimal(10,6)`,
@@ -481,10 +540,11 @@ func migrateSubscriptionPlanPriceAmount() {
} else if common.UsingMySQL {
// MySQL: Check if already decimal
var columnType string
DB.Raw(`SELECT COLUMN_TYPE FROM information_schema.columns
WHERE table_schema = DATABASE() AND table_name = ? AND column_name = ?`,
tableName, columnName).Scan(&columnType)
if strings.HasPrefix(strings.ToLower(columnType), "decimal") {
if err := DB.Raw(`SELECT COLUMN_TYPE FROM information_schema.columns
WHERE table_schema = DATABASE() AND table_name = ? AND column_name = ?`,
tableName, columnName).Scan(&columnType).Error; err != nil {
common.SysLog(fmt.Sprintf("Warning: failed to query metadata for %s.%s: %v", tableName, columnName, err))
} else if strings.HasPrefix(strings.ToLower(columnType), "decimal") {
return // Already decimal
}
alterSQL = fmt.Sprintf("ALTER TABLE %s MODIFY COLUMN %s decimal(10,6) NOT NULL DEFAULT 0",

View File

@@ -25,6 +25,11 @@ type Pricing struct {
ModelPrice float64 `json:"model_price"`
OwnerBy string `json:"owner_by"`
CompletionRatio float64 `json:"completion_ratio"`
CacheRatio *float64 `json:"cache_ratio,omitempty"`
CreateCacheRatio *float64 `json:"create_cache_ratio,omitempty"`
ImageRatio *float64 `json:"image_ratio,omitempty"`
AudioRatio *float64 `json:"audio_ratio,omitempty"`
AudioCompletionRatio *float64 `json:"audio_completion_ratio,omitempty"`
EnableGroup []string `json:"enable_groups"`
SupportedEndpointTypes []constant.EndpointType `json:"supported_endpoint_types"`
PricingVersion string `json:"pricing_version,omitempty"`
@@ -297,12 +302,29 @@ func updatePricing() {
pricing.CompletionRatio = ratio_setting.GetCompletionRatio(model)
pricing.QuotaType = 0
}
if cacheRatio, ok := ratio_setting.GetCacheRatio(model); ok {
pricing.CacheRatio = &cacheRatio
}
if createCacheRatio, ok := ratio_setting.GetCreateCacheRatio(model); ok {
pricing.CreateCacheRatio = &createCacheRatio
}
if imageRatio, ok := ratio_setting.GetImageRatio(model); ok {
pricing.ImageRatio = &imageRatio
}
if ratio_setting.ContainsAudioRatio(model) {
audioRatio := ratio_setting.GetAudioRatio(model)
pricing.AudioRatio = &audioRatio
}
if ratio_setting.ContainsAudioCompletionRatio(model) {
audioCompletionRatio := ratio_setting.GetAudioCompletionRatio(model)
pricing.AudioCompletionRatio = &audioCompletionRatio
}
pricingMap = append(pricingMap, pricing)
}
// 防止大更新后数据不通用
if len(pricingMap) > 0 {
pricingMap[0].PricingVersion = "82c4a357505fff6fee8462c3f7ec8a645bb95532669cb73b2cabee6a416ec24f"
pricingMap[0].PricingVersion = "5a90f2b86c08bd983a9a2e6d66c255f4eaef9c4bc934386d2b6ae84ef0ff1f1f"
}
// 刷新缓存映射,供高并发快速查询

View File

@@ -23,7 +23,7 @@ type Token struct {
RemainQuota int `json:"remain_quota" gorm:"default:0"`
UnlimitedQuota bool `json:"unlimited_quota"`
ModelLimitsEnabled bool `json:"model_limits_enabled"`
ModelLimits string `json:"model_limits" gorm:"type:varchar(1024);default:''"`
ModelLimits string `json:"model_limits" gorm:"type:text"`
AllowIps *string `json:"allow_ips" gorm:"default:''"`
UsedQuota int `json:"used_quota" gorm:"default:0"` // used quota
Group string `json:"group" gorm:"default:''"`
@@ -35,6 +35,27 @@ func (token *Token) Clean() {
token.Key = ""
}
func MaskTokenKey(key string) string {
if key == "" {
return ""
}
if len(key) <= 4 {
return strings.Repeat("*", len(key))
}
if len(key) <= 8 {
return key[:2] + "****" + key[len(key)-2:]
}
return key[:4] + "**********" + key[len(key)-4:]
}
func (token *Token) GetFullKey() string {
return token.Key
}
func (token *Token) GetMaskedKey() string {
return MaskTokenKey(token.Key)
}
func (token *Token) GetIpLimits() []string {
// delete empty spaces
//split with \n
@@ -201,7 +222,7 @@ func ValidateUserToken(key string) (token *Token, err error) {
}
keyPrefix := key[:3]
keySuffix := key[len(key)-3:]
return token, errors.New(fmt.Sprintf("[sk-%s***%s] 该令牌额度已用尽 !token.UnlimitedQuota && token.RemainQuota = %d", keyPrefix, keySuffix, token.RemainQuota))
return token, fmt.Errorf("[sk-%s***%s] 该令牌额度已用尽 !token.UnlimitedQuota && token.RemainQuota = %d", keyPrefix, keySuffix, token.RemainQuota)
}
return token, nil
}

View File

@@ -100,6 +100,9 @@ func getHeaderPassthroughRegex(pattern string) (*regexp.Regexp, error) {
return compiled, nil
}
func IsHeaderPassthroughRuleKey(key string) bool {
return isHeaderPassthroughRuleKey(key)
}
func isHeaderPassthroughRuleKey(key string) bool {
key = strings.TrimSpace(key)
if key == "" {

View File

@@ -1,6 +1,7 @@
package baidu
import (
"encoding/json"
"time"
"github.com/QuantumNous/new-api/dto"
@@ -12,16 +13,16 @@ type BaiduMessage struct {
}
type BaiduChatRequest struct {
Messages []BaiduMessage `json:"messages"`
Temperature *float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
PenaltyScore float64 `json:"penalty_score,omitempty"`
Stream bool `json:"stream,omitempty"`
System string `json:"system,omitempty"`
DisableSearch bool `json:"disable_search,omitempty"`
EnableCitation bool `json:"enable_citation,omitempty"`
MaxOutputTokens *int `json:"max_output_tokens,omitempty"`
UserId string `json:"user_id,omitempty"`
Messages []BaiduMessage `json:"messages"`
Temperature *float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
PenaltyScore float64 `json:"penalty_score,omitempty"`
Stream bool `json:"stream,omitempty"`
System string `json:"system,omitempty"`
DisableSearch bool `json:"disable_search,omitempty"`
EnableCitation bool `json:"enable_citation,omitempty"`
MaxOutputTokens *int `json:"max_output_tokens,omitempty"`
UserId json.RawMessage `json:"user_id,omitempty"`
}
type Error struct {

View File

@@ -25,6 +25,7 @@ var ModelList = []string{
"claude-opus-4-6-high",
"claude-opus-4-6-medium",
"claude-opus-4-6-low",
"claude-sonnet-4-6",
}
var ChannelName = "claude"

View File

@@ -8,7 +8,8 @@ import (
var baseModelList = []string{
"gpt-5", "gpt-5-codex", "gpt-5-codex-mini",
"gpt-5.1", "gpt-5.1-codex", "gpt-5.1-codex-max", "gpt-5.1-codex-mini",
"gpt-5.2", "gpt-5.2-codex", "gpt-5.3-codex",
"gpt-5.2", "gpt-5.2-codex", "gpt-5.3-codex", "gpt-5.3-codex-spark",
"gpt-5.4",
}
var ModelList = withCompactModelSuffix(baseModelList)

View File

@@ -17,7 +17,7 @@ type CozeEnterMessage struct {
type CozeChatRequest struct {
BotId string `json:"bot_id"`
UserId string `json:"user_id"`
UserId json.RawMessage `json:"user_id"`
AdditionalMessages []CozeEnterMessage `json:"additional_messages,omitempty"`
Stream bool `json:"stream,omitempty"`
CustomVariables json.RawMessage `json:"custom_variables,omitempty"`

View File

@@ -34,8 +34,8 @@ func convertCozeChatRequest(c *gin.Context, request dto.GeneralOpenAIRequest) *C
}
}
user := request.User
if user == "" {
user = helper.GetResponseID(c)
if len(user) == 0 {
user = json.RawMessage(helper.GetResponseID(c))
}
cozeRequest := &CozeChatRequest{
BotId: c.GetString("bot_id"),

View File

@@ -1,6 +1,8 @@
package dify
import "github.com/QuantumNous/new-api/dto"
import (
"github.com/QuantumNous/new-api/dto"
)
type DifyChatRequest struct {
Inputs map[string]interface{} `json:"inputs"`

View File

@@ -131,10 +131,16 @@ func requestOpenAI2Dify(c *gin.Context, info *relaycommon.RelayInfo, request dto
}
user := request.User
if user == "" {
user = helper.GetResponseID(c)
if len(user) == 0 {
user = json.RawMessage(helper.GetResponseID(c))
}
difyReq.User = user
var stringUser string
err := json.Unmarshal(user, &stringUser)
if err != nil {
common.SysLog("failed to unmarshal user: " + err.Error())
stringUser = helper.GetResponseID(c)
}
difyReq.User = stringUser
files := make([]DifyFile, 0)
var content strings.Builder

View File

@@ -59,7 +59,7 @@ func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInf
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
if !strings.HasPrefix(info.UpstreamModelName, "imagen") {
return nil, errors.New("not supported model for image generation")
return nil, errors.New("not supported model for image generation, only imagen models are supported")
}
// convert size to aspect ratio but allow user to specify aspect ratio

View File

@@ -2,29 +2,34 @@ package gemini
var ModelList = []string{
// stable version
"gemini-1.5-pro", "gemini-1.5-flash", "gemini-1.5-flash-8b",
"gemini-2.0-flash",
"gemini-2.5-flash", "gemini-2.5-pro", "gemini-2.0-flash",
"gemini-2.0-flash-001", "gemini-2.0-flash-lite-001", "gemini-2.0-flash-lite",
"gemini-2.5-flash-lite",
// latest version
"gemini-1.5-pro-latest", "gemini-1.5-flash-latest",
"gemini-flash-latest", "gemini-flash-lite-latest", "gemini-pro-latest",
"gemini-2.5-flash-native-audio-latest",
// preview version
"gemini-2.0-flash-lite-preview",
"gemini-3-pro-preview",
// gemini exp
"gemini-exp-1206",
// flash exp
"gemini-2.0-flash-exp",
// pro exp
"gemini-2.0-pro-exp",
// thinking exp
"gemini-2.0-flash-thinking-exp",
"gemini-2.5-pro-exp-03-25",
"gemini-2.5-pro-preview-03-25",
// imagen models
"imagen-3.0-generate-002",
"gemini-2.5-flash-preview-tts", "gemini-2.5-pro-preview-tts",
"gemini-2.5-flash-image", "gemini-2.5-flash-lite-preview-09-2025",
"gemini-3-pro-preview", "gemini-3-flash-preview", "gemini-3.1-pro-preview",
"gemini-3.1-pro-preview-customtools", "gemini-3.1-flash-lite-preview",
"gemini-3-pro-image-preview", "nano-banana-pro-preview",
"gemini-3.1-flash-image-preview", "gemini-robotics-er-1.5-preview",
"gemini-2.5-computer-use-preview-10-2025", "deep-research-pro-preview-12-2025",
"gemini-2.5-flash-native-audio-preview-09-2025", "gemini-2.5-flash-native-audio-preview-12-2025",
// gemma models
"gemma-3-1b-it", "gemma-3-4b-it", "gemma-3-12b-it",
"gemma-3-27b-it", "gemma-3n-e4b-it", "gemma-3n-e2b-it",
// embedding models
"gemini-embedding-exp-03-07",
"text-embedding-004",
"embedding-001",
"gemini-embedding-001", "gemini-embedding-2-preview",
// imagen models
"imagen-4.0-generate-001", "imagen-4.0-ultra-generate-001",
"imagen-4.0-fast-generate-001",
// veo models
"veo-2.0-generate-001", "veo-3.0-generate-001", "veo-3.0-fast-generate-001",
"veo-3.1-generate-preview", "veo-3.1-fast-generate-preview",
// other models
"aqa",
}
var SafetySettingList = []string{

View File

@@ -15,8 +15,10 @@ var ModelList = []string{
"speech-01-hd",
"speech-01-turbo",
"MiniMax-M2.1",
"MiniMax-M2.1-lightning",
"MiniMax-M2.1-highspeed",
"MiniMax-M2",
"MiniMax-M2.5",
"MiniMax-M2.5-highspeed",
}
var ChannelName = "minimax"

View File

@@ -1,9 +1,11 @@
package moonshot
var ModelList = []string{
"moonshot-v1-8k",
"moonshot-v1-32k",
"moonshot-v1-128k",
"kimi-k2.5",
"kimi-k2-0905-preview",
"kimi-k2-turbo-preview",
"kimi-k2-thinking",
"kimi-k2-thinking-turbo",
}
var ChannelName = "moonshot"

View File

@@ -225,8 +225,12 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, header *http.Header, info *
}
}
if info.ChannelType == constant.ChannelTypeOpenRouter {
header.Set("HTTP-Referer", "https://www.newapi.ai")
header.Set("X-Title", "New API")
if header.Get("HTTP-Referer") == "" {
header.Set("HTTP-Referer", "https://www.newapi.ai")
}
if header.Get("X-OpenRouter-Title") == "" {
header.Set("X-OpenRouter-Title", "New API")
}
}
return nil
}
@@ -298,6 +302,7 @@ func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayIn
}
reasoning := openrouter.RequestReasoning{
Enabled: true,
MaxTokens: *thinking.BudgetTokens,
}

View File

@@ -3,14 +3,19 @@ package openai
var ModelList = []string{
"gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0125",
"gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-instruct",
"gpt-3.5-turbo-instruct", "gpt-3.5-turbo-instruct-0914",
"gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview",
"gpt-4-32k", "gpt-4-32k-0613",
"gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
"gpt-4-vision-preview",
"chatgpt-4o-latest",
"gpt-4o", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o-2024-11-20",
"gpt-4o-transcribe", "gpt-4o-transcribe-diarize",
"gpt-4o-search-preview", "gpt-4o-search-preview-2025-03-11",
"gpt-4o-mini", "gpt-4o-mini-2024-07-18",
"gpt-4o-mini-transcribe", "gpt-4o-mini-transcribe-2025-03-20", "gpt-4o-mini-transcribe-2025-12-15",
"gpt-4o-mini-tts", "gpt-4o-mini-tts-2025-03-20", "gpt-4o-mini-tts-2025-12-15",
"gpt-4o-mini-search-preview", "gpt-4o-mini-search-preview-2025-03-11",
"gpt-4.5-preview", "gpt-4.5-preview-2025-02-27",
"gpt-4.1", "gpt-4.1-2025-04-14",
"gpt-4.1-mini", "gpt-4.1-mini-2025-04-14",
@@ -31,17 +36,41 @@ var ModelList = []string{
"gpt-5", "gpt-5-2025-08-07", "gpt-5-chat-latest",
"gpt-5-mini", "gpt-5-mini-2025-08-07",
"gpt-5-nano", "gpt-5-nano-2025-08-07",
"gpt-4o-audio-preview", "gpt-4o-audio-preview-2024-10-01",
"gpt-4o-realtime-preview", "gpt-4o-realtime-preview-2024-10-01", "gpt-4o-realtime-preview-2024-12-17",
"gpt-5-codex",
"gpt-5-pro", "gpt-5-pro-2025-10-06",
"gpt-5-search-api", "gpt-5-search-api-2025-10-14",
"gpt-5.1", "gpt-5.1-2025-11-13", "gpt-5.1-chat-latest",
"gpt-5.1-codex", "gpt-5.1-codex-mini", "gpt-5.1-codex-max",
"gpt-5.2", "gpt-5.2-2025-12-11", "gpt-5.2-chat-latest",
"gpt-5.2-pro", "gpt-5.2-pro-2025-12-11",
"gpt-5.2-codex",
"gpt-5.3-chat-latest",
"gpt-5.3-codex",
"gpt-5.4", "gpt-5.4-2026-03-05",
"gpt-5.4-pro", "gpt-5.4-pro-2026-03-05",
"gpt-4o-audio-preview", "gpt-4o-audio-preview-2024-10-01", "gpt-4o-audio-preview-2024-12-17", "gpt-4o-audio-preview-2025-06-03",
"gpt-4o-realtime-preview", "gpt-4o-realtime-preview-2024-10-01", "gpt-4o-realtime-preview-2024-12-17", "gpt-4o-realtime-preview-2025-06-03",
"gpt-4o-mini-realtime-preview", "gpt-4o-mini-realtime-preview-2024-12-17",
"gpt-4o-mini-audio-preview", "gpt-4o-mini-audio-preview-2024-12-17",
"gpt-audio", "gpt-audio-2025-08-28",
"gpt-audio-mini", "gpt-audio-mini-2025-10-06", "gpt-audio-mini-2025-12-15",
"gpt-audio-1.5",
"gpt-realtime", "gpt-realtime-2025-08-28",
"gpt-realtime-mini", "gpt-realtime-mini-2025-10-06", "gpt-realtime-mini-2025-12-15",
"gpt-realtime-1.5",
"text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",
"text-curie-001", "text-babbage-001", "text-ada-001",
"text-moderation-latest", "text-moderation-stable",
"omni-moderation-latest", "omni-moderation-2024-09-26",
"text-davinci-edit-001",
"davinci-002", "babbage-002",
"dall-e-3", "gpt-image-1",
"dall-e-2", "dall-e-3",
"gpt-image-1", "gpt-image-1-mini", "gpt-image-1.5",
"chatgpt-image-latest",
"whisper-1",
"tts-1", "tts-1-1106", "tts-1-hd", "tts-1-hd-1106",
"computer-use-preview", "computer-use-preview-2025-03-11",
"sora-2", "sora-2-pro",
}
var ChannelName = "openai"

View File

@@ -3,6 +3,7 @@ package openrouter
import "encoding/json"
type RequestReasoning struct {
Enabled bool `json:"enabled"`
// One of the following (not both):
Effort string `json:"effort,omitempty"` // Can be "high", "medium", or "low" (OpenAI-style)
MaxTokens int `json:"max_tokens,omitempty"` // Specific token limit (Anthropic-style)

View File

@@ -4,7 +4,9 @@ import (
"bytes"
"fmt"
"io"
"math"
"net/http"
"strconv"
"strings"
"time"
@@ -80,15 +82,28 @@ type responsePayload struct {
TaskId string `json:"task_id"`
TaskStatus string `json:"task_status"`
TaskStatusMsg string `json:"task_status_msg"`
TaskResult struct {
TaskInfo struct {
ExternalTaskId string `json:"external_task_id"`
} `json:"task_info"`
WatermarkInfo struct {
Enabled bool `json:"enabled"`
} `json:"watermark_info"`
TaskResult struct {
Videos []struct {
Id string `json:"id"`
Url string `json:"url"`
Duration string `json:"duration"`
Id string `json:"id"`
Url string `json:"url"`
WatermarkUrl string `json:"watermark_url"`
Duration string `json:"duration"`
} `json:"videos"`
Images []struct {
Index int `json:"index"`
Url string `json:"url"`
WatermarkUrl string `json:"watermark_url"`
} `json:"images"`
} `json:"task_result"`
CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
FinalUnitDeduction string `json:"final_unit_deduction"`
} `json:"data"`
}
@@ -338,15 +353,22 @@ func (a *TaskAdaptor) ParseTaskResult(respBody []byte) (*relaycommon.TaskInfo, e
taskInfo.Status = model.TaskStatusInProgress
case "succeed":
taskInfo.Status = model.TaskStatusSuccess
if videos := resPayload.Data.TaskResult.Videos; len(videos) > 0 {
video := videos[0]
taskInfo.Url = video.Url
}
if tokens, err := strconv.ParseFloat(resPayload.Data.FinalUnitDeduction, 64); err == nil {
rounded := int(math.Ceil(tokens))
if rounded > 0 {
taskInfo.CompletionTokens = rounded
taskInfo.TotalTokens = rounded
}
}
case "failed":
taskInfo.Status = model.TaskStatusFailure
default:
return nil, fmt.Errorf("unknown task status: %s", status)
}
if videos := resPayload.Data.TaskResult.Videos; len(videos) > 0 {
video := videos[0]
taskInfo.Url = video.Url
}
return taskInfo, nil
}
@@ -383,5 +405,12 @@ func (a *TaskAdaptor) ConvertToOpenAIVideo(originTask *model.Task) ([]byte, erro
Code: fmt.Sprintf("%d", klingResp.Code),
}
}
// https://app.klingai.com/cn/dev/document-api/apiReference/model/textToVideo
if data := klingResp.Data; data.TaskStatus == "failed" {
openAIVideo.Error = &dto.OpenAIVideoError{
Message: data.TaskStatusMsg,
}
}
return common.Marshal(openAIVideo)
}

View File

@@ -1,7 +1,7 @@
package zhipu_4v
var ModelList = []string{
"glm-4", "glm-4v", "glm-3-turbo", "glm-4-alltools", "glm-4-plus", "glm-4-0520", "glm-4-air", "glm-4-airx", "glm-4-long", "glm-4-flash", "glm-4v-plus", "glm-4.6",
"glm-4", "glm-4v", "glm-3-turbo", "glm-4-alltools", "glm-4-plus", "glm-4-0520", "glm-4-air", "glm-4-airx", "glm-4-long", "glm-4-flash", "glm-4v-plus", "glm-4.6", "glm-4.6v", "glm-4.7", "glm-4.7-flash", "glm-5",
}
var ChannelName = "zhipu_4v"

View File

@@ -59,7 +59,6 @@ func ClaudeHelper(c *gin.Context, info *relaycommon.RelayInfo) (newAPIError *typ
Type: "adaptive",
}
request.OutputConfig = json.RawMessage(fmt.Sprintf(`{"effort":"%s"}`, effortLevel))
request.TopP = common.GetPointer[float64](0)
request.Temperature = common.GetPointer[float64](1.0)
info.UpstreamModelName = request.Model
} else if model_setting.GetClaudeSettings().ThinkingAdapterEnabled &&
@@ -77,7 +76,6 @@ func ClaudeHelper(c *gin.Context, info *relaycommon.RelayInfo) (newAPIError *typ
}
// TODO: 临时处理
// https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#important-considerations-when-using-extended-thinking
request.TopP = common.GetPointer[float64](0)
request.Temperature = common.GetPointer[float64](1.0)
}
if !model_setting.ShouldPreserveThinkingSuffix(info.OriginModelName) {

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"net/http"
"regexp"
"sort"
"strconv"
"strings"
@@ -487,15 +488,35 @@ func applyOperations(jsonStr string, operations []ParamOperation, conditionConte
}
// 处理路径中的负数索引
opPath := processNegativeIndex(result, op.Path)
var opPaths []string
if isPathBasedOperation(op.Mode) {
opPaths, err = resolveOperationPaths(result, opPath)
if err != nil {
return "", err
}
if len(opPaths) == 0 {
continue
}
}
switch op.Mode {
case "delete":
result, err = sjson.Delete(result, opPath)
case "set":
if op.KeepOrigin && gjson.Get(result, opPath).Exists() {
continue
for _, path := range opPaths {
result, err = deleteValue(result, path)
if err != nil {
break
}
}
case "set":
for _, path := range opPaths {
if op.KeepOrigin && gjson.Get(result, path).Exists() {
continue
}
result, err = sjson.Set(result, path, op.Value)
if err != nil {
break
}
}
result, err = sjson.Set(result, opPath, op.Value)
case "move":
opFrom := processNegativeIndex(result, op.From)
opTo := processNegativeIndex(result, op.To)
@@ -508,27 +529,82 @@ func applyOperations(jsonStr string, operations []ParamOperation, conditionConte
opTo := processNegativeIndex(result, op.To)
result, err = copyValue(result, opFrom, opTo)
case "prepend":
result, err = modifyValue(result, opPath, op.Value, op.KeepOrigin, true)
for _, path := range opPaths {
result, err = modifyValue(result, path, op.Value, op.KeepOrigin, true)
if err != nil {
break
}
}
case "append":
result, err = modifyValue(result, opPath, op.Value, op.KeepOrigin, false)
for _, path := range opPaths {
result, err = modifyValue(result, path, op.Value, op.KeepOrigin, false)
if err != nil {
break
}
}
case "trim_prefix":
result, err = trimStringValue(result, opPath, op.Value, true)
for _, path := range opPaths {
result, err = trimStringValue(result, path, op.Value, true)
if err != nil {
break
}
}
case "trim_suffix":
result, err = trimStringValue(result, opPath, op.Value, false)
for _, path := range opPaths {
result, err = trimStringValue(result, path, op.Value, false)
if err != nil {
break
}
}
case "ensure_prefix":
result, err = ensureStringAffix(result, opPath, op.Value, true)
for _, path := range opPaths {
result, err = ensureStringAffix(result, path, op.Value, true)
if err != nil {
break
}
}
case "ensure_suffix":
result, err = ensureStringAffix(result, opPath, op.Value, false)
for _, path := range opPaths {
result, err = ensureStringAffix(result, path, op.Value, false)
if err != nil {
break
}
}
case "trim_space":
result, err = transformStringValue(result, opPath, strings.TrimSpace)
for _, path := range opPaths {
result, err = transformStringValue(result, path, strings.TrimSpace)
if err != nil {
break
}
}
case "to_lower":
result, err = transformStringValue(result, opPath, strings.ToLower)
for _, path := range opPaths {
result, err = transformStringValue(result, path, strings.ToLower)
if err != nil {
break
}
}
case "to_upper":
result, err = transformStringValue(result, opPath, strings.ToUpper)
for _, path := range opPaths {
result, err = transformStringValue(result, path, strings.ToUpper)
if err != nil {
break
}
}
case "replace":
result, err = replaceStringValue(result, opPath, op.From, op.To)
for _, path := range opPaths {
result, err = replaceStringValue(result, path, op.From, op.To)
if err != nil {
break
}
}
case "regex_replace":
result, err = regexReplaceStringValue(result, opPath, op.From, op.To)
for _, path := range opPaths {
result, err = regexReplaceStringValue(result, path, op.From, op.To)
if err != nil {
break
}
}
case "return_error":
returnErr, parseErr := parseParamOverrideReturnError(op.Value)
if parseErr != nil {
@@ -536,7 +612,12 @@ func applyOperations(jsonStr string, operations []ParamOperation, conditionConte
}
return "", returnErr
case "prune_objects":
result, err = pruneObjects(result, opPath, contextJSON, op.Value)
for _, path := range opPaths {
result, err = pruneObjects(result, path, contextJSON, op.Value)
if err != nil {
break
}
}
case "set_header":
err = setHeaderOverrideInContext(context, op.Path, op.Value, op.KeepOrigin)
if err == nil {
@@ -766,24 +847,30 @@ func resolveHeaderOverrideValueByMapping(context map[string]interface{}, headerN
return "", false, fmt.Errorf("header value mapping cannot be empty")
}
sourceValue, exists := getHeaderValueFromContext(context, headerName)
if !exists {
return "", false, nil
appendTokens, err := parseHeaderAppendTokens(mapping)
if err != nil {
return "", false, err
}
sourceTokens := splitHeaderListValue(sourceValue)
if len(sourceTokens) == 0 {
return "", false, nil
keepOnlyDeclared := parseHeaderKeepOnlyDeclared(mapping)
sourceValue, exists := getHeaderValueFromContext(context, headerName)
sourceTokens := make([]string, 0)
if exists {
sourceTokens = splitHeaderListValue(sourceValue)
}
wildcardValue, hasWildcard := mapping["*"]
resultTokens := make([]string, 0, len(sourceTokens))
resultTokens := make([]string, 0, len(sourceTokens)+len(appendTokens))
for _, token := range sourceTokens {
replacementRaw, hasReplacement := mapping[token]
if !hasReplacement && hasWildcard {
if !hasReplacement && hasWildcard && !keepOnlyDeclared {
replacementRaw = wildcardValue
hasReplacement = true
}
if !hasReplacement {
if keepOnlyDeclared {
continue
}
resultTokens = append(resultTokens, token)
continue
}
@@ -794,6 +881,7 @@ func resolveHeaderOverrideValueByMapping(context map[string]interface{}, headerN
resultTokens = append(resultTokens, replacementTokens...)
}
resultTokens = append(resultTokens, appendTokens...)
resultTokens = lo.Uniq(resultTokens)
if len(resultTokens) == 0 {
return "", false, nil
@@ -801,6 +889,26 @@ func resolveHeaderOverrideValueByMapping(context map[string]interface{}, headerN
return strings.Join(resultTokens, ","), true, nil
}
func parseHeaderAppendTokens(mapping map[string]interface{}) ([]string, error) {
appendRaw, ok := mapping["$append"]
if !ok {
return nil, nil
}
return parseHeaderReplacementTokens(appendRaw)
}
func parseHeaderKeepOnlyDeclared(mapping map[string]interface{}) bool {
keepOnlyDeclaredRaw, ok := mapping["$keep_only_declared"]
if !ok {
return false
}
keepOnlyDeclared, ok := keepOnlyDeclaredRaw.(bool)
if !ok {
return false
}
return keepOnlyDeclared
}
func parseHeaderReplacementTokens(value interface{}) ([]string, error) {
switch raw := value.(type) {
case nil:
@@ -1174,6 +1282,92 @@ func copyValue(jsonStr, fromPath, toPath string) (string, error) {
return sjson.Set(jsonStr, toPath, sourceValue.Value())
}
func isPathBasedOperation(mode string) bool {
switch mode {
case "delete", "set", "prepend", "append", "trim_prefix", "trim_suffix", "ensure_prefix", "ensure_suffix", "trim_space", "to_lower", "to_upper", "replace", "regex_replace", "prune_objects":
return true
default:
return false
}
}
func resolveOperationPaths(jsonStr, path string) ([]string, error) {
if !strings.Contains(path, "*") {
return []string{path}, nil
}
return expandWildcardPaths(jsonStr, path)
}
func expandWildcardPaths(jsonStr, path string) ([]string, error) {
var root interface{}
if err := common.Unmarshal([]byte(jsonStr), &root); err != nil {
return nil, err
}
segments := strings.Split(path, ".")
paths := collectWildcardPaths(root, segments, nil)
return lo.Uniq(paths), nil
}
func collectWildcardPaths(node interface{}, segments []string, prefix []string) []string {
if len(segments) == 0 {
return []string{strings.Join(prefix, ".")}
}
segment := strings.TrimSpace(segments[0])
if segment == "" {
return nil
}
isLast := len(segments) == 1
if segment == "*" {
switch typed := node.(type) {
case map[string]interface{}:
keys := lo.Keys(typed)
sort.Strings(keys)
return lo.FlatMap(keys, func(key string, _ int) []string {
return collectWildcardPaths(typed[key], segments[1:], append(prefix, key))
})
case []interface{}:
return lo.FlatMap(lo.Range(len(typed)), func(index int, _ int) []string {
return collectWildcardPaths(typed[index], segments[1:], append(prefix, strconv.Itoa(index)))
})
default:
return nil
}
}
switch typed := node.(type) {
case map[string]interface{}:
if isLast {
return []string{strings.Join(append(prefix, segment), ".")}
}
next, exists := typed[segment]
if !exists {
return nil
}
return collectWildcardPaths(next, segments[1:], append(prefix, segment))
case []interface{}:
index, err := strconv.Atoi(segment)
if err != nil || index < 0 || index >= len(typed) {
return nil
}
if isLast {
return []string{strings.Join(append(prefix, segment), ".")}
}
return collectWildcardPaths(typed[index], segments[1:], append(prefix, segment))
default:
return nil
}
}
func deleteValue(jsonStr, path string) (string, error) {
if strings.TrimSpace(path) == "" {
return jsonStr, nil
}
return sjson.Delete(jsonStr, path)
}
func modifyValue(jsonStr, path string, value interface{}, keepOrigin, isPrepend bool) (string, error) {
current := gjson.Get(jsonStr, path)
switch {

View File

@@ -2,6 +2,7 @@ package common
import (
"encoding/json"
"fmt"
"reflect"
"testing"
@@ -9,6 +10,7 @@ import (
"github.com/QuantumNous/new-api/dto"
"github.com/QuantumNous/new-api/setting/model_setting"
"github.com/samber/lo"
)
func TestApplyParamOverrideTrimPrefix(t *testing.T) {
@@ -242,6 +244,224 @@ func TestApplyParamOverrideDelete(t *testing.T) {
}
}
func TestApplyParamOverrideDeleteWildcardPath(t *testing.T) {
input := []byte(`{"tools":[{"type":"bash","custom":{"input_examples":["a"],"other":1}},{"type":"code","custom":{"input_examples":["b"]}},{"type":"noop","custom":{"other":2}}]}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "tools.*.custom.input_examples",
"mode": "delete",
},
},
}
out, err := ApplyParamOverride(input, override, nil)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
assertJSONEqual(t, `{"tools":[{"type":"bash","custom":{"other":1}},{"type":"code","custom":{}},{"type":"noop","custom":{"other":2}}]}`, string(out))
}
func TestApplyParamOverrideSetWildcardPath(t *testing.T) {
input := []byte(`{"tools":[{"custom":{"tag":"A"}},{"custom":{"tag":"B"}},{"custom":{"tag":"C"}}]}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "tools.*.custom.enabled",
"mode": "set",
"value": true,
},
},
}
out, err := ApplyParamOverride(input, override, nil)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
var got struct {
Tools []struct {
Custom struct {
Enabled bool `json:"enabled"`
} `json:"custom"`
} `json:"tools"`
}
if err := json.Unmarshal(out, &got); err != nil {
t.Fatalf("failed to unmarshal output JSON: %v", err)
}
if !lo.EveryBy(got.Tools, func(item struct {
Custom struct {
Enabled bool `json:"enabled"`
} `json:"custom"`
}) bool {
return item.Custom.Enabled
}) {
t.Fatalf("expected wildcard set to enable all tools, got: %s", string(out))
}
}
func TestApplyParamOverrideTrimSpaceWildcardPath(t *testing.T) {
input := []byte(`{"tools":[{"custom":{"name":" alpha "}},{"custom":{"name":" beta"}},{"custom":{"name":"gamma "}}]}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "tools.*.custom.name",
"mode": "trim_space",
},
},
}
out, err := ApplyParamOverride(input, override, nil)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
var got struct {
Tools []struct {
Custom struct {
Name string `json:"name"`
} `json:"custom"`
} `json:"tools"`
}
if err := json.Unmarshal(out, &got); err != nil {
t.Fatalf("failed to unmarshal output JSON: %v", err)
}
names := lo.Map(got.Tools, func(item struct {
Custom struct {
Name string `json:"name"`
} `json:"custom"`
}, _ int) string {
return item.Custom.Name
})
if !reflect.DeepEqual(names, []string{"alpha", "beta", "gamma"}) {
t.Fatalf("unexpected names after wildcard trim_space: %v", names)
}
}
func TestApplyParamOverrideDeleteWildcardEqualsIndexedPaths(t *testing.T) {
input := []byte(`{"tools":[{"custom":{"input_examples":["a"],"other":1}},{"custom":{"input_examples":["b"],"other":2}},{"custom":{"input_examples":["c"],"other":3}}]}`)
wildcardOverride := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "tools.*.custom.input_examples",
"mode": "delete",
},
},
}
indexedOverride := map[string]interface{}{
"operations": lo.Map(lo.Range(3), func(index int, _ int) interface{} {
return map[string]interface{}{
"path": fmt.Sprintf("tools.%d.custom.input_examples", index),
"mode": "delete",
}
}),
}
wildcardOut, err := ApplyParamOverride(input, wildcardOverride, nil)
if err != nil {
t.Fatalf("wildcard ApplyParamOverride returned error: %v", err)
}
indexedOut, err := ApplyParamOverride(input, indexedOverride, nil)
if err != nil {
t.Fatalf("indexed ApplyParamOverride returned error: %v", err)
}
assertJSONEqual(t, string(indexedOut), string(wildcardOut))
}
func TestApplyParamOverrideSetWildcardKeepOrigin(t *testing.T) {
input := []byte(`{"tools":[{"custom":{"tag":"A"}},{"custom":{"tag":"B","enabled":false}},{"custom":{"tag":"C"}}]}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "tools.*.custom.enabled",
"mode": "set",
"value": true,
"keep_origin": true,
},
},
}
out, err := ApplyParamOverride(input, override, nil)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
var got struct {
Tools []struct {
Custom struct {
Enabled bool `json:"enabled"`
} `json:"custom"`
} `json:"tools"`
}
if err := json.Unmarshal(out, &got); err != nil {
t.Fatalf("failed to unmarshal output JSON: %v", err)
}
enabledValues := lo.Map(got.Tools, func(item struct {
Custom struct {
Enabled bool `json:"enabled"`
} `json:"custom"`
}, _ int) bool {
return item.Custom.Enabled
})
if !reflect.DeepEqual(enabledValues, []bool{true, false, true}) {
t.Fatalf("unexpected enabled values after wildcard keep_origin set: %v", enabledValues)
}
}
func TestApplyParamOverrideTrimSpaceMultiWildcardPath(t *testing.T) {
input := []byte(`{"tools":[{"custom":{"items":[{"name":" alpha "},{"name":" beta "}]}},{"custom":{"items":[{"name":" gamma"}]}}]}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "tools.*.custom.items.*.name",
"mode": "trim_space",
},
},
}
out, err := ApplyParamOverride(input, override, nil)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
var got struct {
Tools []struct {
Custom struct {
Items []struct {
Name string `json:"name"`
} `json:"items"`
} `json:"custom"`
} `json:"tools"`
}
if err := json.Unmarshal(out, &got); err != nil {
t.Fatalf("failed to unmarshal output JSON: %v", err)
}
names := lo.FlatMap(got.Tools, func(tool struct {
Custom struct {
Items []struct {
Name string `json:"name"`
} `json:"items"`
} `json:"custom"`
}, _ int) []string {
return lo.Map(tool.Custom.Items, func(item struct {
Name string `json:"name"`
}, _ int) string {
return item.Name
})
})
if !reflect.DeepEqual(names, []string{"alpha", "beta", "gamma"}) {
t.Fatalf("unexpected names after multi wildcard trim_space: %v", names)
}
}
func TestApplyParamOverrideSet(t *testing.T) {
input := []byte(`{"model":"gpt-4","temperature":0.7}`)
override := map[string]interface{}{
@@ -261,6 +481,42 @@ func TestApplyParamOverrideSet(t *testing.T) {
assertJSONEqual(t, `{"model":"gpt-4","temperature":0.1}`, string(out))
}
func TestApplyParamOverrideSetWithDescriptionKeepsCompatibility(t *testing.T) {
input := []byte(`{"model":"gpt-4","temperature":0.7}`)
overrideWithoutDesc := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"path": "temperature",
"mode": "set",
"value": 0.1,
},
},
}
overrideWithDesc := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"description": "set temperature for deterministic output",
"path": "temperature",
"mode": "set",
"value": 0.1,
},
},
}
outWithoutDesc, err := ApplyParamOverride(input, overrideWithoutDesc, nil)
if err != nil {
t.Fatalf("ApplyParamOverride without description returned error: %v", err)
}
outWithDesc, err := ApplyParamOverride(input, overrideWithDesc, nil)
if err != nil {
t.Fatalf("ApplyParamOverride with description returned error: %v", err)
}
assertJSONEqual(t, string(outWithoutDesc), string(outWithDesc))
assertJSONEqual(t, `{"model":"gpt-4","temperature":0.1}`, string(outWithDesc))
}
func TestApplyParamOverrideSetKeepOrigin(t *testing.T) {
input := []byte(`{"model":"gpt-4","temperature":0.7}`)
override := map[string]interface{}{
@@ -1397,6 +1653,141 @@ func TestApplyParamOverrideSetHeaderMapDeleteWholeHeaderWhenAllTokensCleared(t *
}
}
func TestApplyParamOverrideSetHeaderMapAppendsTokens(t *testing.T) {
input := []byte(`{"temperature":0.7}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"mode": "set_header",
"path": "anthropic-beta",
"value": map[string]interface{}{
"$append": []interface{}{"context-1m-2025-08-07", "computer-use-2025-01-24"},
},
},
},
}
ctx := map[string]interface{}{
"header_override": map[string]interface{}{
"anthropic-beta": "computer-use-2025-01-24",
},
}
out, err := ApplyParamOverride(input, override, ctx)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
assertJSONEqual(t, `{"temperature":0.7}`, string(out))
headers, ok := ctx["header_override"].(map[string]interface{})
if !ok {
t.Fatalf("expected header_override context map")
}
if headers["anthropic-beta"] != "computer-use-2025-01-24,context-1m-2025-08-07" {
t.Fatalf("expected anthropic-beta to append new token without duplicates, got: %v", headers["anthropic-beta"])
}
}
func TestApplyParamOverrideSetHeaderMapAppendsTokensWhenHeaderMissing(t *testing.T) {
input := []byte(`{"temperature":0.7}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"mode": "set_header",
"path": "anthropic-beta",
"value": map[string]interface{}{
"$append": []interface{}{"context-1m-2025-08-07", "computer-use-2025-01-24"},
},
},
},
}
ctx := map[string]interface{}{}
out, err := ApplyParamOverride(input, override, ctx)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
assertJSONEqual(t, `{"temperature":0.7}`, string(out))
headers, ok := ctx["header_override"].(map[string]interface{})
if !ok {
t.Fatalf("expected header_override context map")
}
if headers["anthropic-beta"] != "context-1m-2025-08-07,computer-use-2025-01-24" {
t.Fatalf("expected anthropic-beta to be created from appended tokens, got: %v", headers["anthropic-beta"])
}
}
func TestApplyParamOverrideSetHeaderMapKeepOnlyDeclaredDropsUndeclaredTokens(t *testing.T) {
input := []byte(`{"temperature":0.7}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"mode": "set_header",
"path": "anthropic-beta",
"value": map[string]interface{}{
"computer-use-2025-01-24": "computer-use-2025-01-24",
"$append": []interface{}{"context-1m-2025-08-07"},
"$keep_only_declared": true,
},
},
},
}
ctx := map[string]interface{}{
"header_override": map[string]interface{}{
"anthropic-beta": "advanced-tool-use-2025-11-20,computer-use-2025-01-24",
},
}
out, err := ApplyParamOverride(input, override, ctx)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
assertJSONEqual(t, `{"temperature":0.7}`, string(out))
headers, ok := ctx["header_override"].(map[string]interface{})
if !ok {
t.Fatalf("expected header_override context map")
}
if headers["anthropic-beta"] != "computer-use-2025-01-24,context-1m-2025-08-07" {
t.Fatalf("expected anthropic-beta to keep only declared tokens, got: %v", headers["anthropic-beta"])
}
}
func TestApplyParamOverrideSetHeaderMapKeepOnlyDeclaredDeletesHeaderWhenNothingDeclaredMatches(t *testing.T) {
input := []byte(`{"temperature":0.7}`)
override := map[string]interface{}{
"operations": []interface{}{
map[string]interface{}{
"mode": "set_header",
"path": "anthropic-beta",
"value": map[string]interface{}{
"computer-use-2025-01-24": "computer-use-2025-01-24",
"$keep_only_declared": true,
},
},
},
}
ctx := map[string]interface{}{
"header_override": map[string]interface{}{
"anthropic-beta": "advanced-tool-use-2025-11-20",
},
}
out, err := ApplyParamOverride(input, override, ctx)
if err != nil {
t.Fatalf("ApplyParamOverride returned error: %v", err)
}
assertJSONEqual(t, `{"temperature":0.7}`, string(out))
headers, ok := ctx["header_override"].(map[string]interface{})
if !ok {
t.Fatalf("expected header_override context map")
}
if _, exists := headers["anthropic-beta"]; exists {
t.Fatalf("expected anthropic-beta to be deleted when no declared tokens remain, got: %v", headers["anthropic-beta"])
}
}
func TestApplyParamOverrideConditionsObjectShorthand(t *testing.T) {
input := []byte(`{"temperature":0.7}`)
override := map[string]interface{}{

View File

@@ -140,18 +140,31 @@ func ModelPriceHelper(c *gin.Context, info *relaycommon.RelayInfo, promptTokens
}
// ModelPriceHelperPerCall 按次计费的 PriceHelper (MJ、Task)
func ModelPriceHelperPerCall(c *gin.Context, info *relaycommon.RelayInfo) types.PriceData {
func ModelPriceHelperPerCall(c *gin.Context, info *relaycommon.RelayInfo) (types.PriceData, error) {
groupRatioInfo := HandleGroupRatio(c, info)
modelPrice, success := ratio_setting.GetModelPrice(info.OriginModelName, true)
// 如果没有配置价格,则使用默认价格
// 如果没有配置价格,检查模型倍率配置
if !success {
// 没有配置费用,也要使用默认费用,否则按费率计费模型无法使用
defaultPrice, ok := ratio_setting.GetDefaultModelPriceMap()[info.OriginModelName]
if !ok {
modelPrice = 0.1
} else {
if ok {
modelPrice = defaultPrice
} else {
// 没有配置倍率也不接受没配置,那就返回错误
_, ratioSuccess, matchName := ratio_setting.GetModelRatio(info.OriginModelName)
acceptUnsetRatio := false
if info.UserSetting.AcceptUnsetRatioModel {
acceptUnsetRatio = true
}
if !ratioSuccess && !acceptUnsetRatio {
return types.PriceData{}, fmt.Errorf("模型 %s 倍率或价格未配置请联系管理员设置或开始自用模式Model %s ratio or price not set, please set or start self-use mode", matchName, matchName)
}
// 未配置价格但配置了倍率,使用默认预扣价格
modelPrice = float64(common.PreConsumedQuota) / common.QuotaPerUnit
}
}
quota := int(modelPrice * common.QuotaPerUnit * groupRatioInfo.GroupRatio)
@@ -170,7 +183,7 @@ func ModelPriceHelperPerCall(c *gin.Context, info *relaycommon.RelayInfo) types.
Quota: quota,
GroupRatioInfo: groupRatioInfo,
}
return priceData
return priceData, nil
}
func ContainPriceOrRatio(modelName string) bool {

View File

@@ -186,7 +186,13 @@ func RelaySwapFace(c *gin.Context, info *relaycommon.RelayInfo) *dto.MidjourneyR
}
modelName := service.CovertMjpActionToModelName(constant.MjActionSwapFace)
priceData := helper.ModelPriceHelperPerCall(c, info)
priceData, err := helper.ModelPriceHelperPerCall(c, info)
if err != nil {
return &dto.MidjourneyResponse{
Code: 4,
Description: err.Error(),
}
}
userQuota, err := model.GetUserQuota(info.UserId, false)
if err != nil {
@@ -487,7 +493,13 @@ func RelayMidjourneySubmit(c *gin.Context, relayInfo *relaycommon.RelayInfo) *dt
modelName := service.CovertMjpActionToModelName(midjRequest.Action)
priceData := helper.ModelPriceHelperPerCall(c, relayInfo)
priceData, err := helper.ModelPriceHelperPerCall(c, relayInfo)
if err != nil {
return &dto.MidjourneyResponse{
Code: 4,
Description: err.Error(),
}
}
userQuota, err := model.GetUserQuota(relayInfo.UserId, false)
if err != nil {

View File

@@ -41,6 +41,8 @@ func ResolveOriginTask(c *gin.Context, info *relaycommon.RelayInfo) *dto.TaskErr
if strings.Contains(path, "/v1/videos/") && strings.HasSuffix(path, "/remix") {
info.Action = constant.TaskActionRemix
}
// 提取 remix 任务的 video_id
if info.Action == constant.TaskActionRemix {
videoID := c.Param("video_id")
if strings.TrimSpace(videoID) == "" {
@@ -176,7 +178,11 @@ func RelayTaskSubmit(c *gin.Context, info *relaycommon.RelayInfo) (*TaskSubmitRe
// 4. 价格计算:基础模型价格
info.OriginModelName = modelName
info.PriceData = helper.ModelPriceHelperPerCall(c, info)
priceData, err := helper.ModelPriceHelperPerCall(c, info)
if err != nil {
return nil, service.TaskErrorWrapper(err, "model_price_error", http.StatusBadRequest)
}
info.PriceData = priceData
// 5. 计费估算:让适配器根据用户请求提供 OtherRatios时长、分辨率等
// 必须在 ModelPriceHelperPerCall 之后调用(它会重建 PriceData

View File

@@ -237,6 +237,10 @@ func SetApiRouter(router *gin.Engine) {
channelRoute.GET("/tag/models", controller.GetTagModels)
channelRoute.POST("/copy/:id", controller.CopyChannel)
channelRoute.POST("/multi_key/manage", controller.ManageMultiKeys)
channelRoute.POST("/upstream_updates/apply", controller.ApplyChannelUpstreamModelUpdates)
channelRoute.POST("/upstream_updates/apply_all", controller.ApplyAllChannelUpstreamModelUpdates)
channelRoute.POST("/upstream_updates/detect", controller.DetectChannelUpstreamModelUpdates)
channelRoute.POST("/upstream_updates/detect_all", controller.DetectAllChannelUpstreamModelUpdates)
}
tokenRoute := apiRouter.Group("/token")
tokenRoute.Use(middleware.UserAuth())
@@ -244,6 +248,7 @@ func SetApiRouter(router *gin.Engine) {
tokenRoute.GET("/", controller.GetAllTokens)
tokenRoute.GET("/search", middleware.SearchRateLimit(), controller.SearchTokens)
tokenRoute.GET("/:id", controller.GetToken)
tokenRoute.POST("/:id/key", middleware.CriticalRateLimit(), middleware.DisableCache(), controller.GetTokenKey)
tokenRoute.POST("/", controller.AddToken)
tokenRoute.PUT("/", controller.UpdateToken)
tokenRoute.DELETE("/:id", controller.DeleteToken)

View File

@@ -214,7 +214,7 @@ func registerMjRouterGroup(relayMjRouter *gin.RouterGroup) {
relayMjRouter.POST("/submit/blend", controller.RelayMidjourney)
relayMjRouter.POST("/submit/edits", controller.RelayMidjourney)
relayMjRouter.POST("/submit/video", controller.RelayMidjourney)
relayMjRouter.POST("/notify", controller.RelayMidjourney)
//relayMjRouter.POST("/notify", controller.RelayMidjourney)
relayMjRouter.GET("/task/:id/fetch", controller.RelayMidjourney)
relayMjRouter.GET("/task/:id/image-seed", controller.RelayMidjourney)
relayMjRouter.POST("/task/list-by-condition", controller.RelayMidjourney)

View File

@@ -34,22 +34,34 @@ func ClaudeToOpenAIRequest(claudeRequest dto.ClaudeRequest, info *relaycommon.Re
isOpenRouter := info.ChannelType == constant.ChannelTypeOpenRouter
if claudeRequest.Thinking != nil && claudeRequest.Thinking.Type == "enabled" {
if isOpenRouter {
reasoning := openrouter.RequestReasoning{
MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
if isOpenRouter {
if effort := claudeRequest.GetEfforts(); effort != "" {
effortBytes, _ := json.Marshal(effort)
openAIRequest.Verbosity = effortBytes
}
if claudeRequest.Thinking != nil {
var reasoning openrouter.RequestReasoning
if claudeRequest.Thinking.Type == "enabled" {
reasoning = openrouter.RequestReasoning{
Enabled: true,
MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
}
} else if claudeRequest.Thinking.Type == "adaptive" {
reasoning = openrouter.RequestReasoning{
Enabled: true,
}
}
reasoningJSON, err := json.Marshal(reasoning)
if err != nil {
return nil, fmt.Errorf("failed to marshal reasoning: %w", err)
}
openAIRequest.Reasoning = reasoningJSON
} else {
thinkingSuffix := "-thinking"
if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
openAIRequest.Model = openAIRequest.Model + thinkingSuffix
}
}
} else {
thinkingSuffix := "-thinking"
if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
openAIRequest.Model = openAIRequest.Model + thinkingSuffix
}
}

View File

@@ -222,13 +222,13 @@ func RecalculateTaskQuota(ctx context.Context, task *model.Task, actualQuota int
}
other := taskBillingOther(task)
other["task_id"] = task.TaskID
other["reason"] = reason
//other["reason"] = reason
other["pre_consumed_quota"] = preConsumedQuota
other["actual_quota"] = actualQuota
model.RecordTaskBillingLog(model.RecordTaskBillingLogParams{
UserId: task.UserId,
LogType: logType,
Content: "",
Content: reason,
ChannelId: task.ChannelId,
ModelName: taskModelName(task),
Quota: logQuota,

View File

@@ -125,8 +125,8 @@ func makeTask(userId, channelId, quota, tokenId int, billingSource string, subsc
SubscriptionId: subscriptionId,
TokenId: tokenId,
BillingContext: &model.TaskBillingContext{
ModelPrice: 0.02,
GroupRatio: 1.0,
ModelPrice: 0.02,
GroupRatio: 1.0,
OriginModelName: "test-model",
},
},
@@ -615,9 +615,11 @@ type mockAdaptor struct {
adjustReturn int
}
func (m *mockAdaptor) Init(_ *relaycommon.RelayInfo) {}
func (m *mockAdaptor) FetchTask(string, string, map[string]any, string) (*http.Response, error) { return nil, nil }
func (m *mockAdaptor) ParseTaskResult([]byte) (*relaycommon.TaskInfo, error) { return nil, nil }
func (m *mockAdaptor) Init(_ *relaycommon.RelayInfo) {}
func (m *mockAdaptor) FetchTask(string, string, map[string]any, string) (*http.Response, error) {
return nil, nil
}
func (m *mockAdaptor) ParseTaskResult([]byte) (*relaycommon.TaskInfo, error) { return nil, nil }
func (m *mockAdaptor) AdjustBillingOnComplete(_ *model.Task, _ *relaycommon.TaskInfo) int {
return m.adjustReturn
}

View File

@@ -22,6 +22,32 @@ func NotifyRootUser(t string, subject string, content string) {
}
}
// NotifyUpstreamModelUpdateWatchers delivers an upstream-model-update summary
// notification to every enabled user with admin role or higher who has opted
// in via the UpstreamModelUpdateNotifyEnabled flag in their user settings.
// Delivery is best-effort: per-user failures are logged and skipped, and a
// final log line reports how many notifications were actually sent.
func NotifyUpstreamModelUpdateWatchers(subject string, content string) {
var users []model.User
// Load only the columns needed to filter recipients and deliver the notice.
if err := model.DB.
Select("id", "email", "role", "status", "setting").
Where("status = ? AND role >= ?", common.UserStatusEnabled, common.RoleAdminUser).
Find(&users).Error; err != nil {
common.SysLog(fmt.Sprintf("failed to query upstream update notification users: %s", err.Error()))
return
}
// One shared notification payload is reused for every recipient.
notification := dto.NewNotify(dto.NotifyTypeChannelUpdate, subject, content, nil)
sentCount := 0
for _, user := range users {
userSetting := user.GetSetting()
// Skip users who have not opted in to upstream model update notices.
if !userSetting.UpstreamModelUpdateNotifyEnabled {
continue
}
if err := NotifyUser(user.Id, user.Email, userSetting, notification); err != nil {
// Best-effort: log and continue with the remaining users.
common.SysLog(fmt.Sprintf("failed to notify user %d for upstream model update: %s", user.Id, err.Error()))
continue
}
sentCount++
}
common.SysLog(fmt.Sprintf("upstream model update notifications sent: %d", sentCount))
}
func NotifyUser(userId int, userEmail string, userSetting dto.UserSetting, data dto.Notify) error {
notifyType := userSetting.NotifyType
if notifyType == "" {

View File

@@ -13,9 +13,9 @@ var Chats = []map[string]string{
{
"Cherry Studio": "cherrystudio://providers/api-keys?v=1&data={cherryConfig}",
},
//{
// "AionUI": "aionui://provider/add?v=1&data={aionuiConfig}",
//},
{
"AionUI": "aionui://provider/add?v=1&data={aionuiConfig}",
},
{
"流畅阅读": "fluentread",
},

View File

@@ -2,6 +2,7 @@ package model_setting
import (
"net/http"
"strings"
"github.com/QuantumNous/new-api/setting/config"
)
@@ -50,23 +51,36 @@ func GetClaudeSettings() *ClaudeSettings {
func (c *ClaudeSettings) WriteHeaders(originModel string, httpHeader *http.Header) {
if headers, ok := c.HeadersSettings[originModel]; ok {
for headerKey, headerValues := range headers {
// get existing values for this header key
existingValues := httpHeader.Values(headerKey)
existingValuesMap := make(map[string]bool)
for _, v := range existingValues {
existingValuesMap[v] = true
}
// add only values that don't already exist
for _, headerValue := range headerValues {
if !existingValuesMap[headerValue] {
httpHeader.Add(headerKey, headerValue)
}
mergedValues := normalizeHeaderListValues(
append(append([]string(nil), httpHeader.Values(headerKey)...), headerValues...),
)
if len(mergedValues) == 0 {
continue
}
httpHeader.Set(headerKey, strings.Join(mergedValues, ","))
}
}
}
// normalizeHeaderListValues flattens a slice of (possibly comma-separated)
// header values into a single deduplicated list of trimmed items, preserving
// the order of first appearance. Items that are empty after trimming are
// dropped. Example: ["a, b", "a", " c "] -> ["a", "b", "c"].
func normalizeHeaderListValues(values []string) []string {
normalizedValues := make([]string, 0, len(values))
seenValues := make(map[string]struct{}, len(values))
for _, value := range values {
// Each raw value may itself be a comma-separated list (e.g. "a, b").
for _, item := range strings.Split(value, ",") {
normalizedItem := strings.TrimSpace(item)
if normalizedItem == "" {
continue
}
// Keep only the first occurrence of each distinct item.
if _, exists := seenValues[normalizedItem]; exists {
continue
}
seenValues[normalizedItem] = struct{}{}
normalizedValues = append(normalizedValues, normalizedItem)
}
}
return normalizedValues
}
func (c *ClaudeSettings) GetDefaultMaxTokens(model string) int {
if maxTokens, ok := c.DefaultMaxTokens[model]; ok {
return maxTokens

View File

@@ -5,6 +5,8 @@ import (
"sort"
"strconv"
"strings"
"github.com/QuantumNous/new-api/types"
)
type StatusCodeRange struct {
@@ -31,6 +33,10 @@ var alwaysSkipRetryStatusCodes = map[int]struct{}{
524: {},
}
var alwaysSkipRetryCodes = map[types.ErrorCode]struct{}{
types.ErrorCodeBadResponseBody: {},
}
func AutomaticDisableStatusCodesToString() string {
return statusCodeRangesToString(AutomaticDisableStatusCodeRanges)
}
@@ -66,6 +72,11 @@ func IsAlwaysSkipRetryStatusCode(code int) bool {
return exists
}
func IsAlwaysSkipRetryCode(errorCode types.ErrorCode) bool {
_, exists := alwaysSkipRetryCodes[errorCode]
return exists
}
func ShouldRetryByStatusCode(code int) bool {
if IsAlwaysSkipRetryStatusCode(code) {
return false

View File

@@ -452,6 +452,44 @@ func GetCompletionRatio(name string) float64 {
return hardCodedRatio
}
// CompletionRatioInfo describes the completion (output-token) ratio resolved
// for a model, and whether that ratio is locked by a hardcoded rule (a locked
// ratio cannot be overridden by user configuration).
type CompletionRatioInfo struct {
Ratio float64 `json:"ratio"`
Locked bool `json:"locked"`
}
// GetCompletionRatioInfo resolves the completion ratio for a model name.
// Resolution order:
//  1. for vendor-prefixed names (containing "/"), a user-configured ratio wins;
//  2. a hardcoded locked ratio, when present, takes precedence;
//  3. otherwise a user-configured ratio is used if set;
//  4. finally the hardcoded default is returned, marked as unlocked.
func GetCompletionRatioInfo(name string) CompletionRatioInfo {
name = FormatMatchingModelName(name)
if strings.Contains(name, "/") {
if ratio, ok := completionRatioMap.Get(name); ok {
return CompletionRatioInfo{
Ratio: ratio,
Locked: false,
}
}
}
hardCodedRatio, locked := getHardcodedCompletionModelRatio(name)
if locked {
return CompletionRatioInfo{
Ratio: hardCodedRatio,
Locked: true,
}
}
if ratio, ok := completionRatioMap.Get(name); ok {
return CompletionRatioInfo{
Ratio: ratio,
Locked: false,
}
}
return CompletionRatioInfo{
Ratio: hardCodedRatio,
Locked: false,
}
}
func getHardcodedCompletionModelRatio(name string) (float64, bool) {
isReservedModel := strings.HasSuffix(name, "-all") || strings.HasSuffix(name, "-gizmo-*")
@@ -471,6 +509,9 @@ func getHardcodedCompletionModelRatio(name string) (float64, bool) {
}
// gpt-5 匹配
if strings.HasPrefix(name, "gpt-5") {
if strings.HasPrefix(name, "gpt-5.4") {
return 6, true
}
return 8, true
}
// gpt-4.5-preview匹配

View File

@@ -21,7 +21,7 @@ import { defineConfig } from 'i18next-cli';
/** @type {import('i18next-cli').I18nextToolkitConfig} */
export default defineConfig({
locales: ['zh', 'en', 'fr', 'ru', 'ja', 'vi'],
locales: ['zh-CN', 'zh-TW', 'en', 'fr', 'ru', 'ja', 'vi'],
extract: {
input: ['src/**/*.{js,jsx,ts,tsx}'],
ignore: ['src/i18n/**/*'],

8
web/index.html vendored
View File

@@ -7,7 +7,13 @@
<meta name="theme-color" content="#ffffff" />
<meta
name="description"
content="OpenAI 接口聚合管理,支持多种渠道包括 Azure可用于二次分发管理 key仅单可执行文件已打包好 Docker 镜像,一键部署,开箱即用"
lang="zh"
content="统一的 AI 模型聚合与分发网关,支持将各类大语言模型跨格式转换为 OpenAI、Claude、Gemini 兼容接口,为个人与企业提供集中式模型管理与网关服务。"
/>
<meta
name="description"
lang="en"
content="A unified AI model hub for aggregation & distribution. It supports cross-converting various LLMs into OpenAI-compatible, Claude-compatible, or Gemini-compatible formats. A centralized gateway for personal and enterprise model management."
/>
<meta name="generator" content="new-api" />
<title>New API</title>

View File

@@ -23,7 +23,6 @@ import { useContainerWidth } from '../../../hooks/common/useContainerWidth';
import {
Divider,
Button,
Tag,
Row,
Col,
Collapsible,
@@ -46,6 +45,7 @@ import { IconChevronDown, IconChevronUp } from '@douyinfe/semi-icons';
* @param {number} collapseHeight 折叠时的高度默认200
* @param {boolean} withCheckbox 是否启用前缀 Checkbox 来控制激活状态
* @param {boolean} loading 是否处于加载状态
* @param {string} variant 颜色变体: 'violet' | 'teal' | 'amber' | 'rose' | 'green',不传则使用默认蓝色
*/
const SelectableButtonGroup = ({
title,
@@ -58,6 +58,7 @@ const SelectableButtonGroup = ({
collapseHeight = 200,
withCheckbox = false,
loading = false,
variant,
}) => {
const [isOpen, setIsOpen] = useState(false);
const [skeletonCount] = useState(12);
@@ -178,9 +179,6 @@ const SelectableButtonGroup = ({
) : (
<Row gutter={gutterSize} style={{ lineHeight: '32px', ...style }}>
{items.map((item) => {
const isDisabled =
item.disabled ||
(typeof item.tagCount === 'number' && item.tagCount === 0);
const isActive = Array.isArray(activeValue)
? activeValue.includes(item.value)
: activeValue === item.value;
@@ -194,13 +192,11 @@ const SelectableButtonGroup = ({
}}
theme={isActive ? 'light' : 'outline'}
type={isActive ? 'primary' : 'tertiary'}
disabled={isDisabled}
className='sbg-button'
icon={
<Checkbox
checked={isActive}
onChange={() => onChange(item.value)}
disabled={isDisabled}
style={{ pointerEvents: 'auto' }}
/>
}
@@ -210,14 +206,9 @@ const SelectableButtonGroup = ({
{item.icon && <span className='sbg-icon'>{item.icon}</span>}
<ConditionalTooltipText text={item.label} />
{item.tagCount !== undefined && shouldShowTags && (
<Tag
className='sbg-tag'
color='white'
shape='circle'
size='small'
>
<span className={`sbg-badge ${isActive ? 'sbg-badge-active' : ''}`}>
{item.tagCount}
</Tag>
</span>
)}
</div>
</Button>
@@ -231,22 +222,16 @@ const SelectableButtonGroup = ({
onClick={() => onChange(item.value)}
theme={isActive ? 'light' : 'outline'}
type={isActive ? 'primary' : 'tertiary'}
disabled={isDisabled}
className='sbg-button'
style={{ width: '100%' }}
>
<div className='sbg-content'>
{item.icon && <span className='sbg-icon'>{item.icon}</span>}
<ConditionalTooltipText text={item.label} />
{item.tagCount !== undefined && shouldShowTags && (
<Tag
className='sbg-tag'
color='white'
shape='circle'
size='small'
>
{item.tagCount !== undefined && shouldShowTags && item.tagCount !== '' && (
<span className={`sbg-badge ${isActive ? 'sbg-badge-active' : ''}`}>
{item.tagCount}
</Tag>
</span>
)}
</div>
</Button>
@@ -258,7 +243,7 @@ const SelectableButtonGroup = ({
return (
<div
className={`mb-8 ${containerWidth <= 400 ? 'sbg-compact' : ''}`}
className={`mb-8 ${containerWidth <= 400 ? 'sbg-compact' : ''}${variant ? ` sbg-variant-${variant}` : ''}`}
ref={containerRef}
>
{title && (

View File

@@ -37,10 +37,11 @@ import {
import { UserContext } from '../../context/User';
import { StatusContext } from '../../context/Status';
import { useLocation } from 'react-router-dom';
import { normalizeLanguage } from '../../i18n/language';
const { Sider, Content, Header } = Layout;
const PageLayout = () => {
const [, userDispatch] = useContext(UserContext);
const [userState, userDispatch] = useContext(UserContext);
const [, statusDispatch] = useContext(StatusContext);
const isMobile = useIsMobile();
const [collapsed, , setCollapsed] = useSidebarCollapsed();
@@ -113,11 +114,34 @@ const PageLayout = () => {
linkElement.href = logo;
}
}
const savedLang = localStorage.getItem('i18nextLng');
if (savedLang) {
i18n.changeLanguage(savedLang);
}, []);
useEffect(() => {
let preferredLang;
if (userState?.user?.setting) {
try {
const settings = JSON.parse(userState.user.setting);
preferredLang = normalizeLanguage(settings.language);
} catch (e) {
// Ignore parse errors
}
}
}, [i18n]);
if (!preferredLang) {
const savedLang = localStorage.getItem('i18nextLng');
if (savedLang) {
preferredLang = normalizeLanguage(savedLang);
}
}
if (preferredLang) {
localStorage.setItem('i18nextLng', preferredLang);
if (preferredLang !== i18n.language) {
i18n.changeLanguage(preferredLang);
}
}
}, [i18n, userState?.user?.setting]);
return (
<Layout

View File

@@ -86,6 +86,7 @@ const PersonalSetting = () => {
gotifyUrl: '',
gotifyToken: '',
gotifyPriority: 5,
upstreamModelUpdateNotifyEnabled: false,
acceptUnsetModelRatioModel: false,
recordIpLog: false,
});
@@ -158,6 +159,8 @@ const PersonalSetting = () => {
gotifyToken: settings.gotify_token || '',
gotifyPriority:
settings.gotify_priority !== undefined ? settings.gotify_priority : 5,
upstreamModelUpdateNotifyEnabled:
settings.upstream_model_update_notify_enabled === true,
acceptUnsetModelRatioModel:
settings.accept_unset_model_ratio_model || false,
recordIpLog: settings.record_ip_log || false,
@@ -426,6 +429,8 @@ const PersonalSetting = () => {
const parsed = parseInt(notificationSettings.gotifyPriority);
return isNaN(parsed) ? 5 : parsed;
})(),
upstream_model_update_notify_enabled:
notificationSettings.upstreamModelUpdateNotifyEnabled === true,
accept_unset_model_ratio_model:
notificationSettings.acceptUnsetModelRatioModel,
record_ip_log: notificationSettings.recordIpLog,

View File

@@ -95,19 +95,19 @@ const RatioSetting = () => {
return (
<Spin spinning={loading} size='large'>
{/* 模型倍率设置以及可视化编辑器 */}
{/* 模型倍率设置以及价格编辑器 */}
<Card style={{ marginTop: '10px' }}>
<Tabs type='card'>
<Tabs type='card' defaultActiveKey='visual'>
<Tabs.TabPane tab={t('模型倍率设置')} itemKey='model'>
<ModelRatioSettings options={inputs} refresh={onRefresh} />
</Tabs.TabPane>
<Tabs.TabPane tab={t('分组倍率设置')} itemKey='group'>
<Tabs.TabPane tab={t('分组相关设置')} itemKey='group'>
<GroupRatioSettings options={inputs} refresh={onRefresh} />
</Tabs.TabPane>
<Tabs.TabPane tab={t('可视化倍率设置')} itemKey='visual'>
<Tabs.TabPane tab={t('价格设置')} itemKey='visual'>
<ModelSettingsVisualEditor options={inputs} refresh={onRefresh} />
</Tabs.TabPane>
<Tabs.TabPane tab={t('未设置倍率模型')} itemKey='unset_models'>
<Tabs.TabPane tab={t('未设置价格模型')} itemKey='unset_models'>
<ModelRatioNotSetEditor options={inputs} refresh={onRefresh} />
</Tabs.TabPane>
<Tabs.TabPane tab={t('上游倍率同步')} itemKey='upstream_sync'>

View File

@@ -58,6 +58,7 @@ const NotificationSettings = ({
const formApiRef = useRef(null);
const [statusState] = useContext(StatusContext);
const [userState] = useContext(UserContext);
const isAdminOrRoot = (userState?.user?.role || 0) >= 10;
// 左侧边栏设置相关状态
const [sidebarLoading, setSidebarLoading] = useState(false);
@@ -470,6 +471,21 @@ const NotificationSettings = ({
]}
/>
{isAdminOrRoot && (
<Form.Switch
field='upstreamModelUpdateNotifyEnabled'
label={t('接收上游模型更新通知')}
checkedText={t('开')}
uncheckedText={t('关')}
onChange={(value) =>
handleFormChange('upstreamModelUpdateNotifyEnabled', value)
}
extraText={t(
'仅管理员可用。开启后,当系统定时检测全部渠道发现上游模型变更或检测异常时,将按你选择的通知方式发送汇总通知;渠道或模型过多时会自动省略部分明细。',
)}
/>
)}
{/* 邮件通知设置 */}
{notificationSettings.warningType === 'email' && (
<Form.Input

View File

@@ -23,6 +23,7 @@ import { Languages } from "lucide-react";
import { useTranslation } from "react-i18next";
import { API, showSuccess, showError } from "../../../../helpers";
import { UserContext } from "../../../../context/User";
import { normalizeLanguage } from "../../../../i18n/language";
// Language options with native names
const languageOptions = [
@@ -39,7 +40,7 @@ const PreferencesSettings = ({ t }) => {
const { i18n } = useTranslation();
const [userState, userDispatch] = useContext(UserContext);
const [currentLanguage, setCurrentLanguage] = useState(
i18n.language || "zh-CN",
normalizeLanguage(i18n.language) || "zh-CN",
);
const [loading, setLoading] = useState(false);
@@ -49,8 +50,7 @@ const PreferencesSettings = ({ t }) => {
try {
const settings = JSON.parse(userState.user.setting);
if (settings.language) {
// Normalize legacy "zh" to "zh-CN" for backward compatibility
const lang = settings.language === "zh" ? "zh-CN" : settings.language;
const lang = normalizeLanguage(settings.language);
setCurrentLanguage(lang);
// Sync i18n with saved preference
if (i18n.language !== lang) {
@@ -73,6 +73,7 @@ const PreferencesSettings = ({ t }) => {
// Update language immediately for responsive UX
setCurrentLanguage(lang);
i18n.changeLanguage(lang);
localStorage.setItem('i18nextLng', lang);
// Save to backend
const res = await API.put("/api/user/self", {
@@ -81,33 +82,38 @@ const PreferencesSettings = ({ t }) => {
if (res.data.success) {
showSuccess(t("语言偏好已保存"));
// Update user context with new setting
// Keep backend preference, context state, and local cache aligned.
let settings = {};
if (userState?.user?.setting) {
try {
const settings = JSON.parse(userState.user.setting);
settings.language = lang;
userDispatch({
type: "login",
payload: {
...userState.user,
setting: JSON.stringify(settings),
},
});
settings = JSON.parse(userState.user.setting) || {};
} catch (e) {
// Ignore
settings = {};
}
}
settings.language = lang;
const nextUser = {
...userState.user,
setting: JSON.stringify(settings),
};
userDispatch({
type: "login",
payload: nextUser,
});
localStorage.setItem("user", JSON.stringify(nextUser));
} else {
showError(res.data.message || t("保存失败"));
// Revert on error
setCurrentLanguage(previousLang);
i18n.changeLanguage(previousLang);
localStorage.setItem("i18nextLng", previousLang);
}
} catch (error) {
showError(t("保存失败,请重试"));
// Revert on error
setCurrentLanguage(previousLang);
i18n.changeLanguage(previousLang);
localStorage.setItem("i18nextLng", previousLang);
} finally {
setLoading(false);
}

View File

@@ -36,6 +36,10 @@ const ChannelsActions = ({
fixChannelsAbilities,
updateAllChannelsBalance,
deleteAllDisabledChannels,
applyAllUpstreamUpdates,
detectAllUpstreamUpdates,
detectAllUpstreamUpdatesLoading,
applyAllUpstreamUpdatesLoading,
compactMode,
setCompactMode,
idSort,
@@ -96,6 +100,8 @@ const ChannelsActions = ({
size='small'
type='tertiary'
className='w-full'
loading={detectAllUpstreamUpdatesLoading}
disabled={detectAllUpstreamUpdatesLoading}
onClick={() => {
Modal.confirm({
title: t('确定?'),
@@ -146,6 +152,46 @@ const ChannelsActions = ({
{t('更新所有已启用通道余额')}
</Button>
</Dropdown.Item>
<Dropdown.Item>
<Button
size='small'
type='tertiary'
className='w-full'
onClick={() => {
Modal.confirm({
title: t('确定?'),
content: t(
'确定要仅检测全部渠道上游模型更新吗?(不执行新增/删除)',
),
onOk: () => detectAllUpstreamUpdates(),
size: 'sm',
centered: true,
});
}}
>
{t('检测全部渠道上游更新')}
</Button>
</Dropdown.Item>
<Dropdown.Item>
<Button
size='small'
type='primary'
className='w-full'
loading={applyAllUpstreamUpdatesLoading}
disabled={applyAllUpstreamUpdatesLoading}
onClick={() => {
Modal.confirm({
title: t('确定?'),
content: t('确定要对全部渠道执行上游模型更新吗?'),
onOk: () => applyAllUpstreamUpdates(),
size: 'sm',
centered: true,
});
}}
>
{t('处理全部渠道上游更新')}
</Button>
</Dropdown.Item>
<Dropdown.Item>
<Button
size='small'

View File

@@ -37,8 +37,13 @@ import {
renderQuotaWithAmount,
showSuccess,
showError,
showInfo,
} from '../../../helpers';
import { CHANNEL_OPTIONS } from '../../../constants';
import {
CHANNEL_OPTIONS,
MODEL_FETCHABLE_CHANNEL_TYPES,
} from '../../../constants';
import { parseUpstreamUpdateMeta } from '../../../hooks/channels/upstreamUpdateUtils';
import {
IconTreeTriangleDown,
IconMore,
@@ -270,6 +275,35 @@ const isRequestPassThroughEnabled = (record) => {
}
};
/**
 * Resolve the upstream-model-update metadata for a channel table record.
 *
 * Tag rows (records with a `children` array) and missing records are never
 * supported. For real channel rows, support additionally requires the channel
 * type to be in MODEL_FETCHABLE_CHANNEL_TYPES. Metadata is taken from a
 * pre-parsed `record.upstreamUpdateMeta` object when available, otherwise
 * parsed from `record.settings`.
 *
 * @param {object|null|undefined} record channel row from the channels table
 * @returns {{supported: boolean, enabled: boolean,
 *            pendingAddModels: string[], pendingRemoveModels: string[]}}
 */
const getUpstreamUpdateMeta = (record) => {
  // Guard: tag aggregation rows and absent records get an inert result.
  if (!record || record.children !== undefined) {
    return {
      supported: false,
      enabled: false,
      pendingAddModels: [],
      pendingRemoveModels: [],
    };
  }
  const supported = MODEL_FETCHABLE_CHANNEL_TYPES.has(record.type);
  // Prefer an already-parsed meta object; fall back to parsing settings.
  const rawMeta = record.upstreamUpdateMeta;
  const parsed =
    rawMeta && typeof rawMeta === 'object'
      ? rawMeta
      : parseUpstreamUpdateMeta(record.settings);
  const asList = (value) => (Array.isArray(value) ? value : []);
  return {
    supported,
    enabled: parsed?.enabled === true,
    pendingAddModels: asList(parsed?.pendingAddModels),
    pendingRemoveModels: asList(parsed?.pendingRemoveModels),
  };
};
export const getChannelsColumns = ({
t,
COLUMN_KEYS,
@@ -291,6 +325,8 @@ export const getChannelsColumns = ({
checkOllamaVersion,
setShowMultiKeyManageModal,
setCurrentMultiKeyChannel,
openUpstreamUpdateModal,
detectChannelUpstreamUpdates,
}) => {
return [
{
@@ -304,6 +340,14 @@ export const getChannelsColumns = ({
dataIndex: 'name',
render: (text, record, index) => {
const passThroughEnabled = isRequestPassThroughEnabled(record);
const upstreamUpdateMeta = getUpstreamUpdateMeta(record);
const pendingAddCount = upstreamUpdateMeta.pendingAddModels.length;
const pendingRemoveCount =
upstreamUpdateMeta.pendingRemoveModels.length;
const showUpstreamUpdateTag =
upstreamUpdateMeta.supported &&
upstreamUpdateMeta.enabled &&
(pendingAddCount > 0 || pendingRemoveCount > 0);
const nameNode =
record.remark && record.remark.trim() !== '' ? (
<Tooltip
@@ -339,26 +383,76 @@ export const getChannelsColumns = ({
<span>{text}</span>
);
if (!passThroughEnabled) {
if (!passThroughEnabled && !showUpstreamUpdateTag) {
return nameNode;
}
return (
<Space spacing={6} align='center'>
{nameNode}
<Tooltip
content={t(
'该渠道已开启请求透传:参数覆写、模型重定向、渠道适配等 NewAPI 内置功能将失效,非最佳实践;如因此产生问题,请勿提交 issue 反馈。',
)}
trigger='hover'
position='topLeft'
>
<span className='inline-flex items-center'>
<IconAlertTriangle
style={{ color: 'var(--semi-color-warning)' }}
/>
</span>
</Tooltip>
{passThroughEnabled && (
<Tooltip
content={t(
'该渠道已开启请求透传:参数覆写、模型重定向、渠道适配等 NewAPI 内置功能将失效,非最佳实践;如因此产生问题,请勿提交 issue 反馈。',
)}
trigger='hover'
position='topLeft'
>
<span className='inline-flex items-center'>
<IconAlertTriangle
style={{ color: 'var(--semi-color-warning)' }}
/>
</span>
</Tooltip>
)}
{showUpstreamUpdateTag && (
<Space spacing={4} align='center'>
{pendingAddCount > 0 ? (
<Tooltip content={t('点击处理新增模型')} position='top'>
<Tag
color='green'
type='light'
size='small'
shape='circle'
className='cursor-pointer transition-all duration-150 hover:opacity-85 hover:-translate-y-px active:scale-95'
onClick={(e) => {
e.stopPropagation();
openUpstreamUpdateModal(
record,
upstreamUpdateMeta.pendingAddModels,
upstreamUpdateMeta.pendingRemoveModels,
'add',
);
}}
>
+{pendingAddCount}
</Tag>
</Tooltip>
) : null}
{pendingRemoveCount > 0 ? (
<Tooltip content={t('点击处理删除模型')} position='top'>
<Tag
color='red'
type='light'
size='small'
shape='circle'
className='cursor-pointer transition-all duration-150 hover:opacity-85 hover:-translate-y-px active:scale-95'
onClick={(e) => {
e.stopPropagation();
openUpstreamUpdateModal(
record,
upstreamUpdateMeta.pendingAddModels,
upstreamUpdateMeta.pendingRemoveModels,
'remove',
);
}}
>
-{pendingRemoveCount}
</Tag>
</Tooltip>
) : null}
</Space>
)}
</Space>
);
},
@@ -585,6 +679,7 @@ export const getChannelsColumns = ({
fixed: 'right',
render: (text, record, index) => {
if (record.children === undefined) {
const upstreamUpdateMeta = getUpstreamUpdateMeta(record);
const moreMenuItems = [
{
node: 'item',
@@ -622,6 +717,43 @@ export const getChannelsColumns = ({
},
];
if (upstreamUpdateMeta.supported) {
moreMenuItems.push({
node: 'item',
name: t('仅检测上游模型更新'),
type: 'tertiary',
onClick: () => {
detectChannelUpstreamUpdates(record);
},
});
moreMenuItems.push({
node: 'item',
name: t('处理上游模型更新'),
type: 'tertiary',
onClick: () => {
if (!upstreamUpdateMeta.enabled) {
showInfo(t('该渠道未开启上游模型更新检测'));
return;
}
if (
upstreamUpdateMeta.pendingAddModels.length === 0 &&
upstreamUpdateMeta.pendingRemoveModels.length === 0
) {
showInfo(t('该渠道暂无可处理的上游模型更新'));
return;
}
openUpstreamUpdateModal(
record,
upstreamUpdateMeta.pendingAddModels,
upstreamUpdateMeta.pendingRemoveModels,
upstreamUpdateMeta.pendingAddModels.length > 0
? 'add'
: 'remove',
);
},
});
}
if (record.type === 4) {
moreMenuItems.unshift({
node: 'item',

View File

@@ -61,6 +61,8 @@ const ChannelsTable = (channelsData) => {
// Multi-key management
setShowMultiKeyManageModal,
setCurrentMultiKeyChannel,
openUpstreamUpdateModal,
detectChannelUpstreamUpdates,
} = channelsData;
// Get all columns
@@ -86,6 +88,8 @@ const ChannelsTable = (channelsData) => {
checkOllamaVersion,
setShowMultiKeyManageModal,
setCurrentMultiKeyChannel,
openUpstreamUpdateModal,
detectChannelUpstreamUpdates,
});
}, [
t,
@@ -108,6 +112,8 @@ const ChannelsTable = (channelsData) => {
checkOllamaVersion,
setShowMultiKeyManageModal,
setCurrentMultiKeyChannel,
openUpstreamUpdateModal,
detectChannelUpstreamUpdates,
]);
// Filter columns based on visibility settings

View File

@@ -33,6 +33,7 @@ import ColumnSelectorModal from './modals/ColumnSelectorModal';
import EditChannelModal from './modals/EditChannelModal';
import EditTagModal from './modals/EditTagModal';
import MultiKeyManageModal from './modals/MultiKeyManageModal';
import ChannelUpstreamUpdateModal from './modals/ChannelUpstreamUpdateModal';
import { createCardProPagination } from '../../../helpers/utils';
const ChannelsPage = () => {
@@ -63,6 +64,15 @@ const ChannelsPage = () => {
channel={channelsData.currentMultiKeyChannel}
onRefresh={channelsData.refresh}
/>
<ChannelUpstreamUpdateModal
visible={channelsData.showUpstreamUpdateModal}
addModels={channelsData.upstreamUpdateAddModels}
removeModels={channelsData.upstreamUpdateRemoveModels}
preferredTab={channelsData.upstreamUpdatePreferredTab}
confirmLoading={channelsData.upstreamApplyLoading}
onConfirm={channelsData.applyUpstreamUpdates}
onCancel={channelsData.closeUpstreamUpdateModal}
/>
{/* Main Content */}
{channelsData.globalPassThroughEnabled ? (

View File

@@ -0,0 +1,313 @@
/*
Copyright (C) 2025 QuantumNous
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
For commercial licensing, please contact support@quantumnous.com
*/
import React, { useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import {
Modal,
Checkbox,
Empty,
Input,
Tabs,
Typography,
} from '@douyinfe/semi-ui';
import {
IllustrationNoResult,
IllustrationNoResultDark,
} from '@douyinfe/semi-illustrations';
import { IconSearch } from '@douyinfe/semi-icons';
import { useIsMobile } from '../../../../hooks/common/useIsMobile';
/**
 * Normalize a model-name list: coerce entries to trimmed strings, drop
 * empties, and deduplicate while keeping first-occurrence order.
 *
 * @param {Array<*>} [models] raw model names (may be null/undefined)
 * @returns {string[]} unique, trimmed, non-empty model names
 */
const normalizeModels = (models = []) => {
  const unique = new Set();
  for (const entry of models || []) {
    const trimmed = String(entry || '').trim();
    if (trimmed) {
      unique.add(trimmed);
    }
  }
  return [...unique];
};
/**
 * Case-insensitively filter model names by a search keyword.
 * A blank (or whitespace-only) keyword returns the input list unchanged,
 * preserving the original array reference.
 *
 * @param {string[]} [models] model names to search
 * @param {string} [keyword] substring to match (trimmed, case-insensitive)
 * @returns {string[]} matching models, or the original list when no keyword
 */
const filterByKeyword = (models = [], keyword = '') => {
  const needle = String(keyword || '')
    .trim()
    .toLowerCase();
  return needle
    ? models.filter((candidate) =>
        String(candidate).toLowerCase().includes(needle),
      )
    : models;
};
const ChannelUpstreamUpdateModal = ({
visible,
addModels = [],
removeModels = [],
preferredTab = 'add',
confirmLoading = false,
onConfirm,
onCancel,
}) => {
const { t } = useTranslation();
const isMobile = useIsMobile();
const normalizedAddModels = useMemo(
() => normalizeModels(addModels),
[addModels],
);
const normalizedRemoveModels = useMemo(
() => normalizeModels(removeModels),
[removeModels],
);
const [selectedAddModels, setSelectedAddModels] = useState([]);
const [selectedRemoveModels, setSelectedRemoveModels] = useState([]);
const [keyword, setKeyword] = useState('');
const [activeTab, setActiveTab] = useState('add');
const [partialSubmitConfirmed, setPartialSubmitConfirmed] = useState(false);
const addTabEnabled = normalizedAddModels.length > 0;
const removeTabEnabled = normalizedRemoveModels.length > 0;
const filteredAddModels = useMemo(
() => filterByKeyword(normalizedAddModels, keyword),
[normalizedAddModels, keyword],
);
const filteredRemoveModels = useMemo(
() => filterByKeyword(normalizedRemoveModels, keyword),
[normalizedRemoveModels, keyword],
);
useEffect(() => {
if (!visible) {
return;
}
setSelectedAddModels([]);
setSelectedRemoveModels([]);
setKeyword('');
setPartialSubmitConfirmed(false);
const normalizedPreferredTab = preferredTab === 'remove' ? 'remove' : 'add';
if (normalizedPreferredTab === 'remove' && removeTabEnabled) {
setActiveTab('remove');
return;
}
if (normalizedPreferredTab === 'add' && addTabEnabled) {
setActiveTab('add');
return;
}
setActiveTab(addTabEnabled ? 'add' : 'remove');
}, [visible, addTabEnabled, removeTabEnabled, preferredTab]);
const currentModels =
activeTab === 'add' ? filteredAddModels : filteredRemoveModels;
const currentSelectedModels =
activeTab === 'add' ? selectedAddModels : selectedRemoveModels;
const currentSetSelectedModels =
activeTab === 'add' ? setSelectedAddModels : setSelectedRemoveModels;
const selectedAddCount = selectedAddModels.length;
const selectedRemoveCount = selectedRemoveModels.length;
const checkedCount = currentModels.filter((model) =>
currentSelectedModels.includes(model),
).length;
const isAllChecked =
currentModels.length > 0 && checkedCount === currentModels.length;
const isIndeterminate =
checkedCount > 0 && checkedCount < currentModels.length;
const handleToggleAllCurrent = (checked) => {
if (checked) {
const merged = normalizeModels([
...currentSelectedModels,
...currentModels,
]);
currentSetSelectedModels(merged);
return;
}
const currentSet = new Set(currentModels);
currentSetSelectedModels(
currentSelectedModels.filter((model) => !currentSet.has(model)),
);
};
const tabList = [
{
itemKey: 'add',
tab: `${t('新增模型')} (${selectedAddCount}/${normalizedAddModels.length})`,
disabled: !addTabEnabled,
},
{
itemKey: 'remove',
tab: `${t('删除模型')} (${selectedRemoveCount}/${normalizedRemoveModels.length})`,
disabled: !removeTabEnabled,
},
];
const submitSelectedChanges = () => {
onConfirm?.({
addModels: selectedAddModels,
removeModels: selectedRemoveModels,
});
};
// OK handler for the modal. Submits the selected add/remove lists, but when
// both tabs have pending changes and one tab was left entirely untouched,
// first switches to the neglected tab and asks the operator whether to
// submit only what is already checked.
const handleSubmit = () => {
const hasAnySelected = selectedAddCount > 0 || selectedRemoveCount > 0;
if (!hasAnySelected) {
// Nothing checked anywhere: submit the empty selection as-is.
submitSelectedChanges();
return;
}
const hasBothPending = addTabEnabled && removeTabEnabled;
const hasUnselectedAdd = addTabEnabled && selectedAddCount === 0;
const hasUnselectedRemove = removeTabEnabled && selectedRemoveCount === 0;
if (hasBothPending && (hasUnselectedAdd || hasUnselectedRemove)) {
// The operator already confirmed a partial submit once; don't re-prompt.
if (partialSubmitConfirmed) {
submitSelectedChanges();
return;
}
const missingTab = hasUnselectedAdd ? 'add' : 'remove';
const missingType = hasUnselectedAdd ? t('新增') : t('删除');
const missingCount = hasUnselectedAdd
? normalizedAddModels.length
: normalizedRemoveModels.length;
// Show the neglected tab behind the confirm dialog so canceling lands the
// operator on the items that still need handling.
setActiveTab(missingTab);
Modal.confirm({
title: t('仍有未处理项'),
content: t(
'你还没有处理{{type}}模型({{count}}个)。是否仅提交当前已勾选内容?',
{
type: missingType,
count: missingCount,
},
),
okText: t('仅提交已勾选'),
cancelText: t('去处理{{type}}', { type: missingType }),
centered: true,
onOk: () => {
// Remember the confirmation so subsequent submits skip this dialog.
setPartialSubmitConfirmed(true);
submitSelectedChanges();
},
});
return;
}
submitSelectedChanges();
};
// Render: a modal with add/remove tabs, per-tab selection counters, a search
// box, a checkbox list of models, and a select-all control.
return (
<Modal
visible={visible}
title={t('处理上游模型更新')}
okText={t('确定')}
cancelText={t('取消')}
size={isMobile ? 'full-width' : 'medium'}
centered
closeOnEsc
maskClosable
confirmLoading={confirmLoading}
onCancel={onCancel}
onOk={handleSubmit}
>
<div className='flex flex-col gap-3'>
<Typography.Text type='secondary' size='small'>
{t(
'可勾选需要执行的变更:新增会加入渠道模型列表,删除会从渠道模型列表移除。',
)}
</Typography.Text>
<Tabs
type='slash'
size='small'
tabList={tabList}
activeKey={activeTab}
onChange={(key) => setActiveTab(key)}
/>
{/* Selection counters for both tabs, visible regardless of active tab. */}
<div className='flex items-center gap-3 text-xs text-gray-500'>
<span>
{t('新增已选 {{selected}} / {{total}}', {
selected: selectedAddCount,
total: normalizedAddModels.length,
})}
</span>
<span>
{t('删除已选 {{selected}} / {{total}}', {
selected: selectedRemoveCount,
total: normalizedRemoveModels.length,
})}
</span>
</div>
<Input
prefix={<IconSearch size={14} />}
placeholder={t('搜索模型')}
value={keyword}
onChange={(value) => setKeyword(value)}
showClear
/>
{/* Scrollable model list; empty-state illustration when the filter matches nothing. */}
<div style={{ maxHeight: 320, overflowY: 'auto', paddingRight: 8 }}>
{currentModels.length === 0 ? (
<Empty
image={
<IllustrationNoResult style={{ width: 150, height: 150 }} />
}
darkModeImage={
<IllustrationNoResultDark style={{ width: 150, height: 150 }} />
}
description={t('暂无匹配模型')}
style={{ padding: 24 }}
/>
) : (
<Checkbox.Group
value={currentSelectedModels}
onChange={(values) =>
currentSetSelectedModels(normalizeModels(values))
}
>
<div className='grid grid-cols-1 md:grid-cols-2 gap-x-4'>
{currentModels.map((model) => (
<Checkbox
key={`${activeTab}:${model}`}
value={model}
className='my-1'
>
{model}
</Checkbox>
))}
</div>
</Checkbox.Group>
)}
</div>
{/* Footer row: checked-count summary plus a select-all checkbox for the visible list. */}
<div className='flex items-center justify-end gap-2'>
<Typography.Text type='secondary' size='small'>
{t('已选择 {{selected}} / {{total}}', {
selected: checkedCount,
total: currentModels.length,
})}
</Typography.Text>
<Checkbox
checked={isAllChecked}
indeterminate={isIndeterminate}
aria-label={t('全选当前列表模型')}
onChange={(e) => handleToggleAllCurrent(e.target.checked)}
/>
</div>
</div>
</Modal>
);
};
export default ChannelUpstreamUpdateModal;

View File

@@ -27,7 +27,7 @@ import {
verifyJSON,
} from '../../../../helpers';
import { useIsMobile } from '../../../../hooks/common/useIsMobile';
import { CHANNEL_OPTIONS } from '../../../../constants';
import { CHANNEL_OPTIONS, MODEL_FETCHABLE_CHANNEL_TYPES } from '../../../../constants';
import {
SideSheet,
Space,
@@ -100,6 +100,7 @@ const REGION_EXAMPLE = {
'gemini-1.5-flash-002': 'europe-west2',
'claude-3-5-sonnet-20240620': 'europe-west1',
};
const UPSTREAM_DETECTED_MODEL_PREVIEW_LIMIT = 8;
const PARAM_OVERRIDE_LEGACY_TEMPLATE = {
temperature: 0,
@@ -203,6 +204,11 @@ const EditChannelModal = (props) => {
allow_include_obfuscation: false,
allow_inference_geo: false,
claude_beta_query: false,
upstream_model_update_check_enabled: false,
upstream_model_update_auto_sync_enabled: false,
upstream_model_update_last_check_time: 0,
upstream_model_update_last_detected_models: [],
upstream_model_update_ignored_models: '',
};
const [batch, setBatch] = useState(false);
const [multiToSingle, setMultiToSingle] = useState(false);
@@ -257,6 +263,23 @@ const EditChannelModal = (props) => {
return [];
}
}, [inputs.model_mapping]);
const upstreamDetectedModels = useMemo(
() =>
Array.from(
new Set(
(inputs.upstream_model_update_last_detected_models || [])
.map((model) => String(model || '').trim())
.filter(Boolean),
),
),
[inputs.upstream_model_update_last_detected_models],
);
const upstreamDetectedModelsPreview = useMemo(
() => upstreamDetectedModels.slice(0, UPSTREAM_DETECTED_MODEL_PREVIEW_LIMIT),
[upstreamDetectedModels],
);
const upstreamDetectedModelsOmittedCount =
upstreamDetectedModels.length - upstreamDetectedModelsPreview.length;
const modelSearchMatchedCount = useMemo(() => {
const keyword = modelSearchValue.trim();
if (!keyword) {
@@ -665,6 +688,14 @@ const EditChannelModal = (props) => {
}
};
const formatUnixTime = (timestamp) => {
const value = Number(timestamp || 0);
if (!value) {
return t('暂无');
}
return new Date(value * 1000).toLocaleString();
};
const copyParamOverrideJson = async () => {
const raw =
typeof inputs.param_override === 'string'
@@ -854,6 +885,22 @@ const EditChannelModal = (props) => {
data.allow_inference_geo =
parsedSettings.allow_inference_geo || false;
data.claude_beta_query = parsedSettings.claude_beta_query || false;
data.upstream_model_update_check_enabled =
parsedSettings.upstream_model_update_check_enabled === true;
data.upstream_model_update_auto_sync_enabled =
parsedSettings.upstream_model_update_auto_sync_enabled === true;
data.upstream_model_update_last_check_time =
Number(parsedSettings.upstream_model_update_last_check_time) || 0;
data.upstream_model_update_last_detected_models = Array.isArray(
parsedSettings.upstream_model_update_last_detected_models,
)
? parsedSettings.upstream_model_update_last_detected_models
: [];
data.upstream_model_update_ignored_models = Array.isArray(
parsedSettings.upstream_model_update_ignored_models,
)
? parsedSettings.upstream_model_update_ignored_models.join(',')
: '';
} catch (error) {
console.error('解析其他设置失败:', error);
data.azure_responses_version = '';
@@ -867,6 +914,11 @@ const EditChannelModal = (props) => {
data.allow_include_obfuscation = false;
data.allow_inference_geo = false;
data.claude_beta_query = false;
data.upstream_model_update_check_enabled = false;
data.upstream_model_update_auto_sync_enabled = false;
data.upstream_model_update_last_check_time = 0;
data.upstream_model_update_last_detected_models = [];
data.upstream_model_update_ignored_models = '';
}
} else {
// 兼容历史数据:老渠道没有 settings 时,默认按 json 展示
@@ -879,6 +931,11 @@ const EditChannelModal = (props) => {
data.allow_include_obfuscation = false;
data.allow_inference_geo = false;
data.claude_beta_query = false;
data.upstream_model_update_check_enabled = false;
data.upstream_model_update_auto_sync_enabled = false;
data.upstream_model_update_last_check_time = 0;
data.upstream_model_update_last_detected_models = [];
data.upstream_model_update_ignored_models = '';
}
if (
@@ -1009,7 +1066,7 @@ const EditChannelModal = (props) => {
const mappingKey = String(pairKey ?? '').trim();
if (!mappingKey) return;
if (!MODEL_FETCHABLE_TYPES.has(inputs.type)) {
if (!MODEL_FETCHABLE_CHANNEL_TYPES.has(inputs.type)) {
return;
}
@@ -1681,6 +1738,29 @@ const EditChannelModal = (props) => {
}
}
settings.upstream_model_update_check_enabled =
localInputs.upstream_model_update_check_enabled === true;
settings.upstream_model_update_auto_sync_enabled =
settings.upstream_model_update_check_enabled &&
localInputs.upstream_model_update_auto_sync_enabled === true;
settings.upstream_model_update_ignored_models = Array.from(
new Set(
String(localInputs.upstream_model_update_ignored_models || '')
.split(',')
.map((model) => model.trim())
.filter(Boolean),
),
);
if (
!Array.isArray(settings.upstream_model_update_last_detected_models) ||
!settings.upstream_model_update_check_enabled
) {
settings.upstream_model_update_last_detected_models = [];
}
if (typeof settings.upstream_model_update_last_check_time !== 'number') {
settings.upstream_model_update_last_check_time = 0;
}
localInputs.settings = JSON.stringify(settings);
// 清理不需要发送到后端的字段
@@ -1702,6 +1782,11 @@ const EditChannelModal = (props) => {
delete localInputs.allow_include_obfuscation;
delete localInputs.allow_inference_geo;
delete localInputs.claude_beta_query;
delete localInputs.upstream_model_update_check_enabled;
delete localInputs.upstream_model_update_auto_sync_enabled;
delete localInputs.upstream_model_update_last_check_time;
delete localInputs.upstream_model_update_last_detected_models;
delete localInputs.upstream_model_update_ignored_models;
let res;
localInputs.auto_ban = localInputs.auto_ban ? 1 : 0;
@@ -3080,7 +3165,7 @@ const EditChannelModal = (props) => {
>
{t('填入所有模型')}
</Button>
{MODEL_FETCHABLE_TYPES.has(inputs.type) && (
{MODEL_FETCHABLE_CHANNEL_TYPES.has(inputs.type) && (
<Button
size='small'
type='tertiary'
@@ -3183,6 +3268,44 @@ const EditChannelModal = (props) => {
}
/>
{MODEL_FETCHABLE_CHANNEL_TYPES.has(inputs.type) && (
<>
<Form.Switch
field='upstream_model_update_check_enabled'
label={t('是否检测上游模型更新')}
checkedText={t('开')}
uncheckedText={t('关')}
onChange={(value) =>
handleChannelOtherSettingsChange(
'upstream_model_update_check_enabled',
value,
)
}
extraText={t(
'开启后由后端定时任务检测该渠道上游模型变化',
)}
/>
<div className='text-xs text-gray-500 mb-2'>
{t('上次检测时间')}:&nbsp;
{formatUnixTime(
inputs.upstream_model_update_last_check_time,
)}
</div>
<Form.Input
field='upstream_model_update_ignored_models'
label={t('已忽略模型')}
placeholder={t('例如gpt-4.1-nano,gpt-4o-mini')}
onChange={(value) =>
handleInputChange(
'upstream_model_update_ignored_models',
value,
)
}
showClear
/>
</>
)}
<Form.Input
field='test_model'
label={t('默认测试模型')}
@@ -3212,7 +3335,7 @@ const EditChannelModal = (props) => {
editorType='keyValue'
formApi={formApiRef.current}
renderStringValueSuffix={({ pairKey, value }) => {
if (!MODEL_FETCHABLE_TYPES.has(inputs.type)) {
if (!MODEL_FETCHABLE_CHANNEL_TYPES.has(inputs.type)) {
return null;
}
const disabled = !String(pairKey ?? '').trim();
@@ -3332,31 +3455,80 @@ const EditChannelModal = (props) => {
initValue={autoBan}
/>
<Form.Switch
field='upstream_model_update_auto_sync_enabled'
label={t('是否自动同步上游模型更新')}
checkedText={t('开')}
uncheckedText={t('关')}
disabled={!inputs.upstream_model_update_check_enabled}
onChange={(value) =>
handleChannelOtherSettingsChange(
'upstream_model_update_auto_sync_enabled',
value,
)
}
extraText={t(
'开启后检测到新增模型会自动加入当前渠道模型列表',
)}
/>
<div className='text-xs text-gray-500 mb-3'>
{t('上次检测到可加入模型')}:&nbsp;
{upstreamDetectedModels.length === 0 ? (
t('暂无')
) : (
<>
<Tooltip
position='topLeft'
content={
<div className='max-w-[640px] break-all text-xs leading-5'>
{upstreamDetectedModels.join(', ')}
</div>
}
>
<span className='cursor-help break-all'>
{upstreamDetectedModelsPreview.join(', ')}
</span>
</Tooltip>
<span className='ml-1 text-gray-400'>
{upstreamDetectedModelsOmittedCount > 0
? t('(共 {{total}} 个,省略 {{omit}} 个)', {
total: upstreamDetectedModels.length,
omit: upstreamDetectedModelsOmittedCount,
})
: t('(共 {{total}} 个)', {
total: upstreamDetectedModels.length,
})}
</span>
</>
)}
</div>
<div className='mb-4'>
<div className='flex items-center justify-between gap-2 mb-1'>
<Text className='text-sm font-medium'>{t('参数覆盖')}</Text>
<Space wrap>
<Button
size='small'
type='primary'
icon={<IconCode size={14} />}
onClick={() => setParamOverrideEditorVisible(true)}
size='small'
type='primary'
icon={<IconCode size={14} />}
onClick={() => setParamOverrideEditorVisible(true)}
>
{t('可视化编辑')}
</Button>
<Button
size='small'
onClick={() =>
applyParamOverrideTemplate('operations', 'fill')
}
size='small'
onClick={() =>
applyParamOverrideTemplate('operations', 'fill')
}
>
{t('填充新模板')}
</Button>
<Button
size='small'
onClick={() =>
applyParamOverrideTemplate('legacy', 'fill')
}
size='small'
onClick={() =>
applyParamOverrideTemplate('legacy', 'fill')
}
>
{t('填充旧模板')}
</Button>
@@ -3373,11 +3545,11 @@ const EditChannelModal = (props) => {
{t('此项可选,用于覆盖请求参数。不支持覆盖 stream 参数')}
</Text>
<div
className='mt-2 rounded-xl p-3'
style={{
backgroundColor: 'var(--semi-color-fill-0)',
border: '1px solid var(--semi-color-fill-2)',
}}
className='mt-2 rounded-xl p-3'
style={{
backgroundColor: 'var(--semi-color-fill-0)',
border: '1px solid var(--semi-color-fill-2)',
}}
>
<div className='flex items-center justify-between mb-2'>
<Tag color={paramOverrideMeta.tagColor}>
@@ -3385,17 +3557,17 @@ const EditChannelModal = (props) => {
</Tag>
<Space spacing={8}>
<Button
size='small'
icon={<IconCopy />}
type='tertiary'
onClick={copyParamOverrideJson}
size='small'
icon={<IconCopy />}
type='tertiary'
onClick={copyParamOverrideJson}
>
{t('复制')}
</Button>
<Button
size='small'
type='tertiary'
onClick={() => setParamOverrideEditorVisible(true)}
size='small'
type='tertiary'
onClick={() => setParamOverrideEditorVisible(true)}
>
{t('编辑')}
</Button>
@@ -3408,82 +3580,81 @@ const EditChannelModal = (props) => {
</div>
<Form.TextArea
field='header_override'
label={t('请求头覆盖')}
placeholder={
t('此项可选,用于覆盖请求头参数') +
'\n' +
t('格式示例:') +
'\n{\n "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36 Edg/139.0.0.0",\n "Authorization": "Bearer {api_key}"\n}'
}
autosize
onChange={(value) =>
handleInputChange('header_override', value)
}
extraText={
<div className='flex flex-col gap-1'>
<div className='flex gap-2 flex-wrap items-center'>
<Text
className='!text-semi-color-primary cursor-pointer'
onClick={() =>
handleInputChange(
'header_override',
JSON.stringify(
{
'*': true,
're:^X-Trace-.*$': true,
'X-Foo': '{client_header:X-Foo}',
Authorization: 'Bearer {api_key}',
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36 Edg/139.0.0.0',
},
null,
2,
),
)
}
>
{t('填入模板')}
</Text>
<Text
className='!text-semi-color-primary cursor-pointer'
onClick={() =>
handleInputChange(
'header_override',
JSON.stringify(
{
'*': true,
},
null,
2,
),
)
}
>
{t('填入透传模版')}
</Text>
<Text
className='!text-semi-color-primary cursor-pointer'
onClick={() => formatJsonField('header_override')}
>
{t('格式化')}
</Text>
</div>
<div>
<Text type='tertiary' size='small'>
{t('支持变量:')}
</Text>
<div className='text-xs text-tertiary ml-2'>
<div>
{t('渠道密钥')}: {'{api_key}'}
field='header_override'
label={t('请求头覆盖')}
placeholder={
t('此项可选,用于覆盖请求头参数') +
'\n' +
t('格式示例:') +
'\n{\n "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36 Edg/139.0.0.0",\n "Authorization": "Bearer {api_key}"\n}'
}
autosize
onChange={(value) =>
handleInputChange('header_override', value)
}
extraText={
<div className='flex flex-col gap-1'>
<div className='flex gap-2 flex-wrap items-center'>
<Text
className='!text-semi-color-primary cursor-pointer'
onClick={() =>
handleInputChange(
'header_override',
JSON.stringify(
{
'*': true,
're:^X-Trace-.*$': true,
'X-Foo': '{client_header:X-Foo}',
Authorization: 'Bearer {api_key}',
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36 Edg/139.0.0.0',
},
null,
2,
),
)
}
>
{t('填入模板')}
</Text>
<Text
className='!text-semi-color-primary cursor-pointer'
onClick={() =>
handleInputChange(
'header_override',
JSON.stringify(
{
'*': true,
},
null,
2,
),
)
}
>
{t('填入透传模版')}
</Text>
<Text
className='!text-semi-color-primary cursor-pointer'
onClick={() => formatJsonField('header_override')}
>
{t('格式化')}
</Text>
</div>
<div>
<Text type='tertiary' size='small'>
{t('支持变量:')}
</Text>
<div className='text-xs text-tertiary ml-2'>
<div>
{t('渠道密钥')}: {'{api_key}'}
</div>
</div>
</div>
</div>
</div>
}
showClear
}
showClear
/>
<JSONEditor
key={`status_code_mapping-${isEdit ? channelId : 'new'}`}
field='status_code_mapping'

View File

@@ -34,7 +34,7 @@ import {
TextArea,
Typography,
} from '@douyinfe/semi-ui';
import { IconDelete, IconPlus } from '@douyinfe/semi-icons';
import { IconDelete, IconMenu, IconPlus } from '@douyinfe/semi-icons';
import { copy, showError, showSuccess, verifyJSON } from '../../../../helpers';
import {
CLAUDE_CLI_HEADER_PASSTHROUGH_TEMPLATE,
@@ -163,7 +163,7 @@ const MODE_DESCRIPTIONS = {
prune_objects: '按条件清理对象中的子项',
pass_headers: '把指定请求头透传到上游请求',
sync_fields: '在一个字段有值、另一个缺失时自动补齐',
set_header: '设置运行期请求头(支持整值覆盖,或用 JSON 映射按逗号 token 替换/删除)',
set_header: '设置运行期请求头:可直接覆盖整条值,也可对逗号分隔的 token 做删除、替换、追加或白名单保留',
delete_header: '删除运行期请求头',
copy_header: '复制请求头',
move_header: '移动请求头',
@@ -230,17 +230,29 @@ const getModeValueLabel = (mode) => {
return '值(支持 JSON 或普通文本)';
};
const HEADER_VALUE_JSONC_EXAMPLE = `{
// 置空:删除 Bedrock 不支持的 beta特性
"files-api-2025-04-14": null,
// 替换:把旧特性改成兼容特性
"advanced-tool-use-2025-11-20": "tool-search-tool-2025-10-19",
// 追加:在末尾补一个需要的特性
"$append": ["context-1m-2025-08-07"]
}`;
const getModeValuePlaceholder = (mode) => {
if (mode === 'set_header') {
return [
'String example:',
'纯字符串(整条覆盖):',
'Bearer sk-xxx',
'',
'JSON map example:',
'{"advanced-tool-use-2025-11-20": null, "computer-use-2025-01-24": "computer-use-2025-01-24"}',
'',
'JSON map wildcard:',
'{"*": null, "computer-use-2025-11-24": "computer-use-2025-11-24"}',
'或使用 JSON 规则:',
'{',
' "files-api-2025-04-14": null,',
' "advanced-tool-use-2025-11-20": "tool-search-tool-2025-10-19",',
' "$append": ["context-1m-2025-08-07"]',
'}',
].join('\n');
}
if (mode === 'pass_headers') return 'Authorization, X-Request-Id';
@@ -258,11 +270,6 @@ const getModeValuePlaceholder = (mode) => {
return '0.7';
};
const getModeValueHelp = (mode) => {
if (mode !== 'set_header') return '';
return '字符串整条请求头直接覆盖。JSON 映射:按逗号分隔 token 逐项处理null 表示删除string/array 表示替换,* 表示兜底规则。';
};
const SYNC_TARGET_TYPE_OPTIONS = [
{ label: '请求体字段', value: 'json' },
{ label: '请求头字段', value: 'header' },
@@ -276,6 +283,7 @@ const LEGACY_TEMPLATE = {
const OPERATION_TEMPLATE = {
operations: [
{
description: 'Set default temperature for openai/* models.',
path: 'temperature',
mode: 'set',
value: 0.7,
@@ -294,8 +302,9 @@ const OPERATION_TEMPLATE = {
const HEADER_PASSTHROUGH_TEMPLATE = {
operations: [
{
description: 'Pass through X-Request-Id header to upstream.',
mode: 'pass_headers',
value: ['Authorization'],
value: ['X-Request-Id'],
keep_origin: true,
},
],
@@ -304,6 +313,8 @@ const HEADER_PASSTHROUGH_TEMPLATE = {
const GEMINI_IMAGE_4K_TEMPLATE = {
operations: [
{
description:
'Set imageSize to 4K when model contains gemini/image and ends with 4k.',
mode: 'set',
path: 'generationConfig.imageConfig.imageSize',
value: '4K',
@@ -311,7 +322,17 @@ const GEMINI_IMAGE_4K_TEMPLATE = {
{
path: 'original_model',
mode: 'contains',
value: 'gemini-3-pro-image-preview',
value: 'gemini',
},
{
path: 'original_model',
mode: 'contains',
value: 'image',
},
{
path: 'original_model',
mode: 'suffix',
value: '4k',
},
],
logic: 'AND',
@@ -319,11 +340,13 @@ const GEMINI_IMAGE_4K_TEMPLATE = {
],
};
const AWS_BEDROCK_ANTHROPIC_BETA_OVERRIDE_TEMPLATE = {
const AWS_BEDROCK_ANTHROPIC_COMPAT_TEMPLATE = {
operations: [
{
description: 'Normalize anthropic-beta header tokens for Bedrock compatibility.',
mode: 'set_header',
path: 'anthropic-beta',
// https://github.com/BerriAI/litellm/blob/main/litellm/anthropic_beta_headers_config.json
value: {
'advanced-tool-use-2025-11-20': 'tool-search-tool-2025-10-19',
bash_20241022: null,
@@ -353,8 +376,14 @@ const AWS_BEDROCK_ANTHROPIC_BETA_OVERRIDE_TEMPLATE = {
'tool-search-tool-2025-10-19': 'tool-search-tool-2025-10-19',
'web-fetch-2025-09-10': null,
'web-search-2025-03-05': null,
'oauth-2025-04-20': null
},
},
{
description: 'Remove all tools[*].custom.input_examples before upstream relay.',
mode: 'delete',
path: 'tools.*.custom.input_examples',
},
],
};
@@ -378,7 +407,7 @@ const TEMPLATE_PRESET_CONFIG = {
},
pass_headers_auth: {
group: 'scenario',
label: '请求头透传(Authorization',
label: '请求头透传(X-Request-Id',
kind: 'operations',
payload: HEADER_PASSTHROUGH_TEMPLATE,
},
@@ -402,9 +431,9 @@ const TEMPLATE_PRESET_CONFIG = {
},
aws_bedrock_anthropic_beta_override: {
group: 'scenario',
label: 'AWS Bedrock anthropic-beta覆盖',
label: 'AWS Bedrock Claude 兼容模板',
kind: 'operations',
payload: AWS_BEDROCK_ANTHROPIC_BETA_OVERRIDE_TEMPLATE,
payload: AWS_BEDROCK_ANTHROPIC_COMPAT_TEMPLATE,
},
};
@@ -764,6 +793,7 @@ const createDefaultCondition = () => normalizeCondition({});
const normalizeOperation = (operation = {}) => ({
id: nextLocalId(),
description: typeof operation.description === 'string' ? operation.description : '',
path: typeof operation.path === 'string' ? operation.path : '',
mode: OPERATION_MODE_VALUES.has(operation.mode) ? operation.mode : 'set',
value_text: toValueText(operation.value),
@@ -778,6 +808,38 @@ const normalizeOperation = (operation = {}) => ({
const createDefaultOperation = () => normalizeOperation({ mode: 'set' });
const reorderOperations = (
sourceOperations = [],
sourceId,
targetId,
position = 'before',
) => {
if (!sourceId || !targetId || sourceId === targetId) {
return sourceOperations;
}
const sourceIndex = sourceOperations.findIndex((item) => item.id === sourceId);
if (sourceIndex < 0) {
return sourceOperations;
}
const nextOperations = [...sourceOperations];
const [moved] = nextOperations.splice(sourceIndex, 1);
let insertIndex = nextOperations.findIndex((item) => item.id === targetId);
if (insertIndex < 0) {
return sourceOperations;
}
if (position === 'after') {
insertIndex += 1;
}
nextOperations.splice(insertIndex, 0, moved);
return nextOperations;
};
const getOperationSummary = (operation = {}, index = 0) => {
const mode = operation.mode || 'set';
const modeLabel = OPERATION_MODE_LABEL_MAP[mode] || mode;
@@ -1015,8 +1077,12 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
const [operationSearch, setOperationSearch] = useState('');
const [selectedOperationId, setSelectedOperationId] = useState('');
const [expandedConditionMap, setExpandedConditionMap] = useState({});
const [draggedOperationId, setDraggedOperationId] = useState('');
const [dragOverOperationId, setDragOverOperationId] = useState('');
const [dragOverPosition, setDragOverPosition] = useState('before');
const [templateGroupKey, setTemplateGroupKey] = useState('basic');
const [templatePresetKey, setTemplatePresetKey] = useState('operations_default');
const [headerValueExampleVisible, setHeaderValueExampleVisible] = useState(false);
const [fieldGuideVisible, setFieldGuideVisible] = useState(false);
const [fieldGuideTarget, setFieldGuideTarget] = useState('path');
const [fieldGuideKeyword, setFieldGuideKeyword] = useState('');
@@ -1033,6 +1099,9 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
setOperationSearch('');
setSelectedOperationId(nextState.operations[0]?.id || '');
setExpandedConditionMap({});
setDraggedOperationId('');
setDragOverOperationId('');
setDragOverPosition('before');
if (nextState.visualMode === 'legacy') {
setTemplateGroupKey('basic');
setTemplatePresetKey('legacy_default');
@@ -1040,6 +1109,7 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
setTemplateGroupKey('basic');
setTemplatePresetKey('operations_default');
}
setHeaderValueExampleVisible(false);
setFieldGuideVisible(false);
setFieldGuideTarget('path');
setFieldGuideKeyword('');
@@ -1086,6 +1156,7 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
if (!keyword) return operations;
return operations.filter((operation) => {
const searchableText = [
operation.description,
operation.mode,
operation.path,
operation.from,
@@ -1151,10 +1222,14 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
const payloadOps = filteredOps.map((operation) => {
const mode = operation.mode || 'set';
const meta = MODE_META[mode] || MODE_META.set;
const descriptionValue = String(operation.description || '').trim();
const pathValue = operation.path.trim();
const fromValue = operation.from.trim();
const toValue = operation.to.trim();
const payload = { mode };
if (descriptionValue) {
payload.description = descriptionValue;
}
if (meta.path) {
payload.path = pathValue;
}
@@ -1556,6 +1631,67 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
setSelectedOperationId(created.id);
};
const resetOperationDragState = useCallback(() => {
setDraggedOperationId('');
setDragOverOperationId('');
setDragOverPosition('before');
}, []);
const moveOperation = useCallback(
(sourceId, targetId, position = 'before') => {
if (!sourceId || !targetId || sourceId === targetId) {
return;
}
setOperations((prev) =>
reorderOperations(prev, sourceId, targetId, position),
);
setSelectedOperationId(sourceId);
},
[],
);
const handleOperationDragStart = useCallback((event, operationId) => {
setDraggedOperationId(operationId);
setSelectedOperationId(operationId);
event.dataTransfer.effectAllowed = 'move';
event.dataTransfer.setData('text/plain', operationId);
}, []);
const handleOperationDragOver = useCallback(
(event, operationId) => {
event.preventDefault();
if (!draggedOperationId || draggedOperationId === operationId) {
return;
}
const rect = event.currentTarget.getBoundingClientRect();
const position =
event.clientY - rect.top > rect.height / 2 ? 'after' : 'before';
setDragOverOperationId(operationId);
setDragOverPosition(position);
event.dataTransfer.dropEffect = 'move';
},
[draggedOperationId],
);
const handleOperationDrop = useCallback(
(event, operationId) => {
event.preventDefault();
const sourceId =
draggedOperationId || event.dataTransfer.getData('text/plain');
const position =
dragOverOperationId === operationId ? dragOverPosition : 'before';
moveOperation(sourceId, operationId, position);
resetOperationDragState();
},
[
dragOverOperationId,
dragOverPosition,
draggedOperationId,
moveOperation,
resetOperationDragState,
],
);
const duplicateOperation = (operationId) => {
let insertedId = '';
setOperations((prev) => {
@@ -1563,6 +1699,7 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
if (index < 0) return prev;
const source = prev[index];
const cloned = normalizeOperation({
description: source.description,
path: source.path,
mode: source.mode,
value: parseLooseValue(source.value_text),
@@ -1812,14 +1949,6 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
{t('重置')}
</Button>
</Space>
<Text
type='tertiary'
size='small'
className='cursor-pointer select-none mt-1 whitespace-nowrap'
onClick={() => openFieldGuide('path')}
>
{t('字段速查')}
</Text>
</div>
</Card>
@@ -1891,7 +2020,7 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
<Input
value={operationSearch}
placeholder={t('搜索规则(类型 / 路径 / 来源 / 目标)')}
placeholder={t('搜索规则(描述 / 类型 / 路径 / 来源 / 目标)')}
onChange={(nextValue) =>
setOperationSearch(nextValue || '')
}
@@ -1921,14 +2050,31 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
);
const isActive =
operation.id === selectedOperationId;
const isDragging =
operation.id === draggedOperationId;
const isDropTarget =
operation.id === dragOverOperationId &&
draggedOperationId &&
draggedOperationId !== operation.id;
return (
<div
key={operation.id}
role='button'
tabIndex={0}
draggable={operations.length > 1}
onClick={() =>
setSelectedOperationId(operation.id)
}
onDragStart={(event) =>
handleOperationDragStart(event, operation.id)
}
onDragOver={(event) =>
handleOperationDragOver(event, operation.id)
}
onDrop={(event) =>
handleOperationDrop(event, operation.id)
}
onDragEnd={resetOperationDragState}
onKeyDown={(event) => {
if (
event.key === 'Enter' ||
@@ -1946,18 +2092,53 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
border: isActive
? '1px solid var(--semi-color-primary)'
: '1px solid var(--semi-color-border)',
opacity: isDragging ? 0.6 : 1,
boxShadow: isDropTarget
? dragOverPosition === 'after'
? 'inset 0 -3px 0 var(--semi-color-primary)'
: 'inset 0 3px 0 var(--semi-color-primary)'
: 'none',
}}
>
<div className='flex items-start justify-between gap-2'>
<div>
<Text strong>{`#${index + 1}`}</Text>
<Text
type='tertiary'
size='small'
className='block mt-1'
<div className='flex items-start gap-2 min-w-0'>
<div
className='flex-shrink-0'
style={{
color: 'var(--semi-color-text-2)',
cursor: operations.length > 1 ? 'grab' : 'default',
marginTop: 1,
}}
>
{getOperationSummary(operation, index)}
</Text>
<IconMenu />
</div>
<div className='min-w-0'>
<Text strong>{`#${index + 1}`}</Text>
<Text
type='tertiary'
size='small'
className='block mt-1'
>
{getOperationSummary(operation, index)}
</Text>
{String(operation.description || '').trim() ? (
<Text
type='tertiary'
size='small'
className='block mt-1'
style={{
lineHeight: 1.5,
wordBreak: 'break-word',
overflow: 'hidden',
display: '-webkit-box',
WebkitLineClamp: 2,
WebkitBoxOrient: 'vertical',
}}
>
{operation.description}
</Text>
) : null}
</div>
</div>
<Tag size='small' color='grey'>
{(operation.conditions || []).length}
@@ -2035,6 +2216,7 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
type='danger'
theme='borderless'
icon={<IconDelete />}
aria-label={t('删除规则')}
onClick={() =>
removeOperation(selectedOperation.id)
}
@@ -2085,6 +2267,25 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
>
{MODE_DESCRIPTIONS[mode] || ''}
</Text>
<div className='mt-2'>
<Text type='tertiary' size='small'>
{t('规则描述(可选)')}
</Text>
<Input
value={selectedOperation.description || ''}
placeholder={t('例如:清理工具参数,避免上游校验错误')}
onChange={(nextValue) =>
updateOperation(selectedOperation.id, {
description: nextValue || '',
})
}
maxLength={180}
showClear
/>
<Text type='tertiary' size='small' className='mt-1 block'>
{`${String(selectedOperation.description || '').length}/180`}
</Text>
</div>
{meta.value ? (
mode === 'return_error' && returnErrorDraft ? (
@@ -2631,15 +2832,35 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
{t(getModeValueLabel(mode))}
</Text>
{mode === 'set_header' ? (
<Button
size='small'
type='tertiary'
onClick={formatSelectedOperationValueAsJson}
>
{t('格式化 JSON')}
</Button>
<Space spacing={6}>
<Button
size='small'
type='tertiary'
onClick={() =>
setHeaderValueExampleVisible(true)
}
>
{t('查看 JSON 示例')}
</Button>
<Button
size='small'
type='tertiary'
onClick={formatSelectedOperationValueAsJson}
>
{t('格式化 JSON')}
</Button>
</Space>
) : null}
</div>
{mode === 'set_header' ? (
<Text
type='tertiary'
size='small'
className='mt-1 mb-2 block'
>
{t('纯字符串会直接覆盖整条请求头,或者点击“查看 JSON 示例”按 token 规则处理。')}
</Text>
) : null}
<TextArea
value={selectedOperation.value_text}
autosize={{ minRows: 1, maxRows: 4 }}
@@ -2650,11 +2871,6 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
})
}
/>
{getModeValueHelp(mode) ? (
<Text type='tertiary' size='small'>
{t(getModeValueHelp(mode))}
</Text>
) : null}
</div>
)
) : null}
@@ -3110,6 +3326,27 @@ const ParamOverrideEditorModal = ({ visible, value, onSave, onCancel }) => {
</Space>
</Modal>
<Modal
title={t('anthropic-beta JSON 示例')}
visible={headerValueExampleVisible}
width={760}
footer={null}
onCancel={() => setHeaderValueExampleVisible(false)}
bodyStyle={{ padding: 16, paddingBottom: 24 }}
>
<Space vertical align='start' spacing={12} style={{ width: '100%' }}>
<Text type='tertiary' size='small'>
{t('下面是带注释的示例,仅用于参考;实际保存时请删除注释。')}
</Text>
<TextArea
value={HEADER_VALUE_JSONC_EXAMPLE}
readOnly
autosize={{ minRows: 16, maxRows: 20 }}
style={{ marginBottom: 8 }}
/>
</Space>
</Modal>
<Modal
title={null}
visible={fieldGuideVisible}

View File

@@ -25,6 +25,7 @@ const PricingDisplaySettings = ({
setShowWithRecharge,
currency,
setCurrency,
siteDisplayType,
showRatio,
setShowRatio,
viewMode,
@@ -34,11 +35,17 @@ const PricingDisplaySettings = ({
loading = false,
t,
}) => {
const supportsCurrencyDisplay = siteDisplayType !== 'TOKENS';
const items = [
{
value: 'recharge',
label: t('充值价格显示'),
},
...(supportsCurrencyDisplay
? [
{
value: 'recharge',
label: t('充值价格显示'),
},
]
: []),
{
value: 'ratio',
label: t('显示倍率'),
@@ -78,7 +85,7 @@ const PricingDisplaySettings = ({
const getActiveValues = () => {
const activeValues = [];
if (showWithRecharge) activeValues.push('recharge');
if (supportsCurrencyDisplay && showWithRecharge) activeValues.push('recharge');
if (showRatio) activeValues.push('ratio');
if (viewMode === 'table') activeValues.push('tableView');
if (tokenUnit === 'K') activeValues.push('tokenUnit');
@@ -98,7 +105,7 @@ const PricingDisplaySettings = ({
t={t}
/>
{showWithRecharge && (
{supportsCurrencyDisplay && showWithRecharge && (
<SelectableButtonGroup
title={t('货币单位')}
items={currencyItems}

View File

@@ -76,7 +76,6 @@ const PricingEndpointTypes = ({
value: 'all',
label: t('全部端点'),
tagCount: getEndpointTypeCount('all'),
disabled: models.length === 0,
},
...availableEndpointTypes.map((endpointType) => {
const count = getEndpointTypeCount(endpointType);
@@ -84,7 +83,6 @@ const PricingEndpointTypes = ({
value: endpointType,
label: getEndpointTypeLabel(endpointType),
tagCount: count,
disabled: count === 0,
};
}),
];
@@ -96,6 +94,7 @@ const PricingEndpointTypes = ({
activeValue={filterEndpointType}
onChange={setFilterEndpointType}
loading={loading}
variant='green'
t={t}
/>
);

View File

@@ -52,20 +52,19 @@ const PricingGroups = ({
.length;
let ratioDisplay = '';
if (g === 'all') {
ratioDisplay = t('全部');
// ratioDisplay = t('全部');
} else {
const ratio = groupRatio[g];
if (ratio !== undefined && ratio !== null) {
ratioDisplay = `x${ratio}`;
ratioDisplay = `${ratio}x`;
} else {
ratioDisplay = 'x1';
ratioDisplay = '1x';
}
}
return {
value: g,
label: g === 'all' ? t('全部分组') : g,
tagCount: ratioDisplay,
disabled: modelCount === 0,
};
});
@@ -76,6 +75,7 @@ const PricingGroups = ({
activeValue={filterGroup}
onChange={setFilterGroup}
loading={loading}
variant='teal'
t={t}
/>
);

View File

@@ -52,6 +52,7 @@ const PricingQuotaTypes = ({
activeValue={filterQuotaType}
onChange={setFilterQuotaType}
loading={loading}
variant='amber'
t={t}
/>
);

View File

@@ -78,7 +78,6 @@ const PricingTags = ({
value: 'all',
label: t('全部标签'),
tagCount: getTagCount('all'),
disabled: models.length === 0,
},
];
@@ -88,7 +87,6 @@ const PricingTags = ({
value: tag,
label: tag,
tagCount: count,
disabled: count === 0,
});
});
@@ -102,6 +100,7 @@ const PricingTags = ({
activeValue={filterTag}
onChange={setFilterTag}
loading={loading}
variant='rose'
t={t}
/>
);

View File

@@ -83,7 +83,6 @@ const PricingVendors = ({
value: 'all',
label: t('全部供应商'),
tagCount: getVendorCount('all'),
disabled: models.length === 0,
},
];
@@ -96,7 +95,6 @@ const PricingVendors = ({
label: vendor,
icon: icon ? getLobeHubIcon(icon, 16) : null,
tagCount: count,
disabled: count === 0,
});
});
@@ -107,7 +105,6 @@ const PricingVendors = ({
value: 'unknown',
label: t('未知供应商'),
tagCount: count,
disabled: count === 0,
});
}
@@ -121,6 +118,7 @@ const PricingVendors = ({
activeValue={filterVendor}
onChange={setFilterVendor}
loading={loading}
variant='violet'
t={t}
/>
);

View File

@@ -70,6 +70,7 @@ const PricingPage = () => {
groupRatio={pricingData.groupRatio}
usableGroup={pricingData.usableGroup}
currency={pricingData.currency}
siteDisplayType={pricingData.siteDisplayType}
tokenUnit={pricingData.tokenUnit}
displayPrice={pricingData.displayPrice}
showRatio={allProps.showRatio}

View File

@@ -113,15 +113,6 @@ const PricingSidebar = ({
t={t}
/>
<PricingTags
filterTag={filterTag}
setFilterTag={setFilterTag}
models={tagModels}
allModels={categoryProps.models}
loading={loading}
t={t}
/>
<PricingGroups
filterGroup={filterGroup}
setFilterGroup={handleGroupClick}
@@ -140,6 +131,15 @@ const PricingSidebar = ({
t={t}
/>
<PricingTags
filterTag={filterTag}
setFilterTag={setFilterTag}
models={tagModels}
allModels={categoryProps.models}
loading={loading}
t={t}
/>
<PricingEndpointTypes
filterEndpointType={filterEndpointType}
setFilterEndpointType={setFilterEndpointType}

View File

@@ -40,6 +40,7 @@ const PricingTopSection = memo(
setShowWithRecharge,
currency,
setCurrency,
siteDisplayType,
showRatio,
setShowRatio,
viewMode,
@@ -68,6 +69,7 @@ const PricingTopSection = memo(
setShowWithRecharge={setShowWithRecharge}
currency={currency}
setCurrency={setCurrency}
siteDisplayType={siteDisplayType}
showRatio={showRatio}
setShowRatio={setShowRatio}
viewMode={viewMode}
@@ -103,6 +105,7 @@ const PricingTopSection = memo(
setShowWithRecharge={setShowWithRecharge}
currency={currency}
setCurrency={setCurrency}
siteDisplayType={siteDisplayType}
showRatio={showRatio}
setShowRatio={setShowRatio}
viewMode={viewMode}

View File

@@ -35,6 +35,7 @@ const SearchActions = memo(
setShowWithRecharge,
currency,
setCurrency,
siteDisplayType,
showRatio,
setShowRatio,
viewMode,
@@ -43,6 +44,8 @@ const SearchActions = memo(
setTokenUnit,
t,
}) => {
const supportsCurrencyDisplay = siteDisplayType !== 'TOKENS';
const handleCopyClick = useCallback(() => {
if (copyText && selectedRowKeys.length > 0) {
copyText(selectedRowKeys);
@@ -91,16 +94,18 @@ const SearchActions = memo(
<Divider layout='vertical' margin='8px' />
{/* 充值价格显示开关 */}
<div className='flex items-center gap-2'>
<span className='text-sm text-gray-600'>{t('充值价格显示')}</span>
<Switch
checked={showWithRecharge}
onChange={setShowWithRecharge}
/>
</div>
{supportsCurrencyDisplay && (
<div className='flex items-center gap-2'>
<span className='text-sm text-gray-600'>{t('充值价格显示')}</span>
<Switch
checked={showWithRecharge}
onChange={setShowWithRecharge}
/>
</div>
)}
{/* 货币单位选择 */}
{showWithRecharge && (
{supportsCurrencyDisplay && showWithRecharge && (
<Select
value={currency}
onChange={setCurrency}

View File

@@ -35,6 +35,7 @@ const ModelDetailSideSheet = ({
modelData,
groupRatio,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
showRatio,
@@ -92,6 +93,7 @@ const ModelDetailSideSheet = ({
modelData={modelData}
groupRatio={groupRatio}
currency={currency}
siteDisplayType={siteDisplayType}
tokenUnit={tokenUnit}
displayPrice={displayPrice}
showRatio={showRatio}

View File

@@ -32,6 +32,7 @@ const FilterModalContent = ({ sidebarProps, t }) => {
setShowWithRecharge,
currency,
setCurrency,
siteDisplayType,
handleChange,
setActiveKey,
showRatio,
@@ -77,6 +78,7 @@ const FilterModalContent = ({ sidebarProps, t }) => {
setShowWithRecharge={setShowWithRecharge}
currency={currency}
setCurrency={setCurrency}
siteDisplayType={siteDisplayType}
showRatio={showRatio}
setShowRatio={setShowRatio}
viewMode={viewMode}
@@ -96,15 +98,6 @@ const FilterModalContent = ({ sidebarProps, t }) => {
t={t}
/>
<PricingTags
filterTag={filterTag}
setFilterTag={setFilterTag}
models={tagModels}
allModels={categoryProps.models}
loading={loading}
t={t}
/>
<PricingGroups
filterGroup={filterGroup}
setFilterGroup={setFilterGroup}
@@ -123,6 +116,15 @@ const FilterModalContent = ({ sidebarProps, t }) => {
t={t}
/>
<PricingTags
filterTag={filterTag}
setFilterTag={setFilterTag}
models={tagModels}
allModels={categoryProps.models}
loading={loading}
t={t}
/>
<PricingEndpointTypes
filterEndpointType={filterEndpointType}
setFilterEndpointType={setFilterEndpointType}

View File

@@ -20,7 +20,7 @@ For commercial licensing, please contact support@quantumnous.com
import React from 'react';
import { Card, Avatar, Typography, Table, Tag } from '@douyinfe/semi-ui';
import { IconCoinMoneyStroked } from '@douyinfe/semi-icons';
import { calculateModelPrice } from '../../../../../helpers';
import { calculateModelPrice, getModelPriceItems } from '../../../../../helpers';
const { Text } = Typography;
@@ -28,6 +28,7 @@ const ModelPricingTable = ({
modelData,
groupRatio,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
showRatio,
@@ -57,6 +58,7 @@ const ModelPricingTable = ({
tokenUnit,
displayPrice,
currency,
quotaDisplayType: siteDisplayType,
})
: { inputPrice: '-', outputPrice: '-', price: '-' };
@@ -74,12 +76,7 @@ const ModelPricingTable = ({
: modelData?.quota_type === 1
? t('按次计费')
: '-',
inputPrice: modelData?.quota_type === 0 ? priceData.inputPrice : '-',
outputPrice:
modelData?.quota_type === 0
? priceData.completionPrice || priceData.outputPrice
: '-',
fixedPrice: modelData?.quota_type === 1 ? priceData.price : '-',
priceItems: getModelPriceItems(priceData, t, siteDisplayType),
};
});
@@ -126,48 +123,22 @@ const ModelPricingTable = ({
},
});
// 根据计费类型添加价格列
if (modelData?.quota_type === 0) {
// 按量计费
columns.push(
{
title: t('提示'),
dataIndex: 'inputPrice',
render: (text) => (
<>
<div className='font-semibold text-orange-600'>{text}</div>
<div className='text-xs text-gray-500'>
/ {tokenUnit === 'K' ? '1K' : '1M'} tokens
columns.push({
title: siteDisplayType === 'TOKENS' ? t('计费摘要') : t('价格摘要'),
dataIndex: 'priceItems',
render: (items) => (
<div className='space-y-1'>
{items.map((item) => (
<div key={item.key}>
<div className='font-semibold text-orange-600'>
{item.label} {item.value}
</div>
</>
),
},
{
title: t('补全'),
dataIndex: 'outputPrice',
render: (text) => (
<>
<div className='font-semibold text-orange-600'>{text}</div>
<div className='text-xs text-gray-500'>
/ {tokenUnit === 'K' ? '1K' : '1M'} tokens
</div>
</>
),
},
);
} else {
// 按次计费
columns.push({
title: t('价格'),
dataIndex: 'fixedPrice',
render: (text) => (
<>
<div className='font-semibold text-orange-600'>{text}</div>
<div className='text-xs text-gray-500'>/ </div>
</>
),
});
}
<div className='text-xs text-gray-500'>{item.suffix}</div>
</div>
))}
</div>
),
});
return (
<Table

View File

@@ -67,6 +67,7 @@ const PricingCardView = ({
setModalImageUrl,
setIsModalOpenurl,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
showRatio,
@@ -246,6 +247,7 @@ const PricingCardView = ({
tokenUnit,
displayPrice,
currency,
quotaDisplayType: siteDisplayType,
});
return (
@@ -264,8 +266,8 @@ const PricingCardView = ({
<h3 className='text-lg font-bold text-gray-900 truncate'>
{model.model_name}
</h3>
<div className='flex items-center gap-3 text-xs mt-1'>
{formatPriceInfo(priceData, t)}
<div className='flex flex-col gap-1 text-xs mt-1'>
{formatPriceInfo(priceData, t, siteDisplayType)}
</div>
</div>
</div>

View File

@@ -37,6 +37,7 @@ const PricingTable = ({
setModalImageUrl,
setIsModalOpenurl,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
searchValue,
@@ -54,6 +55,7 @@ const PricingTable = ({
setModalImageUrl,
setIsModalOpenurl,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
showRatio,
@@ -66,6 +68,7 @@ const PricingTable = ({
setModalImageUrl,
setIsModalOpenurl,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
showRatio,

View File

@@ -24,6 +24,7 @@ import {
renderModelTag,
stringToColor,
calculateModelPrice,
getModelPriceItems,
getLobeHubIcon,
} from '../../../../../helpers';
import {
@@ -108,6 +109,7 @@ export const getPricingTableColumns = ({
setModalImageUrl,
setIsModalOpenurl,
currency,
siteDisplayType,
tokenUnit,
displayPrice,
showRatio,
@@ -125,6 +127,7 @@ export const getPricingTableColumns = ({
tokenUnit,
displayPrice,
currency,
quotaDisplayType: siteDisplayType,
});
priceDataCache.set(record, cache);
}
@@ -226,31 +229,23 @@ export const getPricingTableColumns = ({
};
const priceColumn = {
title: t('模型价格'),
title: siteDisplayType === 'TOKENS' ? t('计费摘要') : t('模型价格'),
dataIndex: 'model_price',
...(isMobile ? {} : { fixed: 'right' }),
render: (text, record, index) => {
const priceData = getPriceData(record);
const priceItems = getModelPriceItems(priceData, t, siteDisplayType);
if (priceData.isPerToken) {
return (
<div className='space-y-1'>
<div className='text-gray-700'>
{t('输入')} {priceData.inputPrice} / 1{priceData.unitLabel} tokens
return (
<div className='space-y-1'>
{priceItems.map((item) => (
<div key={item.key} className='text-gray-700'>
{item.label} {item.value}
{item.suffix}
</div>
<div className='text-gray-700'>
{t('输出')} {priceData.completionPrice} / 1{priceData.unitLabel}{' '}
tokens
</div>
</div>
);
} else {
return (
<div className='text-gray-700'>
{t('模型价格')}{priceData.price}
</div>
);
}
))}
</div>
);
},
};

View File

@@ -29,7 +29,6 @@ const TokensActions = ({
setShowEdit,
batchCopyTokens,
batchDeleteTokens,
copyText,
t,
}) => {
// Modal states
@@ -99,8 +98,7 @@ const TokensActions = ({
<CopyTokensModal
visible={showCopyModal}
onCancel={() => setShowCopyModal(false)}
selectedKeys={selectedKeys}
copyText={copyText}
batchCopyTokens={batchCopyTokens}
t={t}
/>

View File

@@ -108,17 +108,28 @@ const renderGroupColumn = (text, record, t) => {
};
// Render token key column with show/hide and copy functionality
const renderTokenKey = (text, record, showKeys, setShowKeys, copyText) => {
const fullKey = 'sk-' + record.key;
const maskedKey =
'sk-' + record.key.slice(0, 4) + '**********' + record.key.slice(-4);
const renderTokenKey = (
text,
record,
showKeys,
resolvedTokenKeys,
loadingTokenKeys,
toggleTokenVisibility,
copyTokenKey,
) => {
const revealed = !!showKeys[record.id];
const loading = !!loadingTokenKeys[record.id];
const keyValue =
revealed && resolvedTokenKeys[record.id]
? resolvedTokenKeys[record.id]
: record.key || '';
const displayedKey = keyValue ? `sk-${keyValue}` : '';
return (
<div className='w-[200px]'>
<Input
readOnly
value={revealed ? fullKey : maskedKey}
value={displayedKey}
size='small'
suffix={
<div className='flex items-center'>
@@ -127,10 +138,11 @@ const renderTokenKey = (text, record, showKeys, setShowKeys, copyText) => {
size='small'
type='tertiary'
icon={revealed ? <IconEyeClosed /> : <IconEyeOpened />}
loading={loading}
aria-label='toggle token visibility'
onClick={(e) => {
onClick={async (e) => {
e.stopPropagation();
setShowKeys((prev) => ({ ...prev, [record.id]: !revealed }));
await toggleTokenVisibility(record);
}}
/>
<Button
@@ -138,10 +150,11 @@ const renderTokenKey = (text, record, showKeys, setShowKeys, copyText) => {
size='small'
type='tertiary'
icon={<IconCopy />}
loading={loading}
aria-label='copy token key'
onClick={async (e) => {
e.stopPropagation();
await copyText(fullKey);
await copyTokenKey(record);
}}
/>
</div>
@@ -427,8 +440,10 @@ const renderOperations = (
export const getTokensColumns = ({
t,
showKeys,
setShowKeys,
copyText,
resolvedTokenKeys,
loadingTokenKeys,
toggleTokenVisibility,
copyTokenKey,
manageToken,
onOpenLink,
setEditingToken,
@@ -461,7 +476,15 @@ export const getTokensColumns = ({
title: t('密钥'),
key: 'token_key',
render: (text, record) =>
renderTokenKey(text, record, showKeys, setShowKeys, copyText),
renderTokenKey(
text,
record,
showKeys,
resolvedTokenKeys,
loadingTokenKeys,
toggleTokenVisibility,
copyTokenKey,
),
},
{
title: t('可用模型'),

View File

@@ -39,8 +39,10 @@ const TokensTable = (tokensData) => {
rowSelection,
handleRow,
showKeys,
setShowKeys,
copyText,
resolvedTokenKeys,
loadingTokenKeys,
toggleTokenVisibility,
copyTokenKey,
manageToken,
onOpenLink,
setEditingToken,
@@ -54,8 +56,10 @@ const TokensTable = (tokensData) => {
return getTokensColumns({
t,
showKeys,
setShowKeys,
copyText,
resolvedTokenKeys,
loadingTokenKeys,
toggleTokenVisibility,
copyTokenKey,
manageToken,
onOpenLink,
setEditingToken,
@@ -65,8 +69,10 @@ const TokensTable = (tokensData) => {
}, [
t,
showKeys,
setShowKeys,
copyText,
resolvedTokenKeys,
loadingTokenKeys,
toggleTokenVisibility,
copyTokenKey,
manageToken,
onOpenLink,
setEditingToken,

View File

@@ -58,6 +58,7 @@ function TokensPage() {
t: (k) => k,
selectedModel: '',
prefillKey: '',
fetchTokenKey: async () => '',
});
const [modelOptions, setModelOptions] = useState([]);
const [selectedModel, setSelectedModel] = useState('');
@@ -74,6 +75,7 @@ function TokensPage() {
t: tokensData.t,
selectedModel,
prefillKey,
fetchTokenKey: tokensData.fetchTokenKey,
};
}, [
tokensData.tokens,
@@ -81,6 +83,7 @@ function TokensPage() {
tokensData.t,
selectedModel,
prefillKey,
tokensData.fetchTokenKey,
]);
const loadModels = async () => {
@@ -198,13 +201,14 @@ function TokensPage() {
openCCSwitchModalRef.current = openCCSwitchModal;
// Prefill to Fluent handler
const handlePrefillToFluent = () => {
const handlePrefillToFluent = async () => {
const {
tokens,
selectedKeys,
t,
selectedModel: chosenModel,
prefillKey: overrideKey,
fetchTokenKey,
} = latestRef.current;
const container = document.getElementById('fluent-new-api-container');
if (!container) {
@@ -241,7 +245,11 @@ function TokensPage() {
Toast.warning(t('没有可用令牌用于填充'));
return;
}
apiKeyToUse = 'sk-' + token.key;
try {
apiKeyToUse = 'sk-' + (await fetchTokenKey(token));
} catch (_) {
return;
}
}
const payload = {
@@ -351,7 +359,6 @@ function TokensPage() {
setShowEdit,
batchCopyTokens,
batchDeleteTokens,
copyText,
// Filters state
formInitValues,
@@ -401,7 +408,6 @@ function TokensPage() {
setShowEdit={setShowEdit}
batchCopyTokens={batchCopyTokens}
batchDeleteTokens={batchDeleteTokens}
copyText={copyText}
t={t}
/>

View File

@@ -116,8 +116,7 @@ export default function CCSwitchModal({
Toast.warning(t('请选择主模型'));
return;
}
const apiKey = 'sk-' + tokenKey;
const url = buildCCSwitchURL(app, name, models, apiKey);
const url = buildCCSwitchURL(app, name, models, 'sk-' + tokenKey);
window.open(url, '_blank');
onClose();
};

View File

@@ -20,24 +20,21 @@ For commercial licensing, please contact support@quantumnous.com
import React from 'react';
import { Modal, Button, Space } from '@douyinfe/semi-ui';
const CopyTokensModal = ({ visible, onCancel, selectedKeys, copyText, t }) => {
const CopyTokensModal = ({
visible,
onCancel,
batchCopyTokens,
t,
}) => {
// Handle copy with name and key format
const handleCopyWithName = async () => {
let content = '';
for (let i = 0; i < selectedKeys.length; i++) {
content += selectedKeys[i].name + ' sk-' + selectedKeys[i].key + '\n';
}
await copyText(content);
await batchCopyTokens('name+key');
onCancel();
};
// Handle copy with key only format
const handleCopyKeyOnly = async () => {
let content = '';
for (let i = 0; i < selectedKeys.length; i++) {
content += 'sk-' + selectedKeys[i].key + '\n';
}
await copyText(content);
await batchCopyTokens('key-only');
onCancel();
};

View File

@@ -337,6 +337,7 @@ export const getLogsColumns = ({
showUserInfoFunc,
openChannelAffinityUsageCacheModal,
isAdminUser,
billingDisplayMode = 'price',
}) => {
return [
{
@@ -761,11 +762,10 @@ export const getLogsColumns = ({
Boolean(other?.violation_fee_marker)
) {
const feeQuota = other?.fee_quota ?? record?.quota;
const ratioText = formatRatio(other?.group_ratio);
const summary = [
t('违规扣费'),
`${t('分组倍率')}${ratioText}`,
`${t('扣费')}${renderQuota(feeQuota, 6)}`,
`${t('分组倍率')}${formatRatio(other?.group_ratio)}`,
text ? `${t('详情')}${text}` : null,
]
.filter(Boolean)
@@ -808,6 +808,7 @@ export const getLogsColumns = ({
1.0,
other?.is_system_prompt_overwritten,
'claude',
billingDisplayMode,
)
: renderModelPriceSimple(
other.model_ratio,
@@ -826,6 +827,7 @@ export const getLogsColumns = ({
1.0,
other?.is_system_prompt_overwritten,
'openai',
billingDisplayMode,
);
return (
<Typography.Paragraph

View File

@@ -43,6 +43,7 @@ const LogsTable = (logsData) => {
openChannelAffinityUsageCacheModal,
hasExpandableRows,
isAdminUser,
billingDisplayMode,
t,
COLUMN_KEYS,
} = logsData;
@@ -56,6 +57,7 @@ const LogsTable = (logsData) => {
showUserInfoFunc,
openChannelAffinityUsageCacheModal,
isAdminUser,
billingDisplayMode,
});
}, [
t,
@@ -64,6 +66,7 @@ const LogsTable = (logsData) => {
showUserInfoFunc,
openChannelAffinityUsageCacheModal,
isAdminUser,
billingDisplayMode,
]);
// Filter columns based on visibility settings

View File

@@ -18,7 +18,7 @@ For commercial licensing, please contact support@quantumnous.com
*/
import React from 'react';
import { Modal, Button, Checkbox } from '@douyinfe/semi-ui';
import { Modal, Button, Checkbox, RadioGroup, Radio } from '@douyinfe/semi-ui';
import { getLogsColumns } from '../UsageLogsColumnDefs';
const ColumnSelectorModal = ({
@@ -28,12 +28,18 @@ const ColumnSelectorModal = ({
handleColumnVisibilityChange,
handleSelectAll,
initDefaultColumns,
billingDisplayMode,
setBillingDisplayMode,
COLUMN_KEYS,
isAdminUser,
copyText,
showUserInfoFunc,
t,
}) => {
const isTokensDisplay =
typeof localStorage !== 'undefined' &&
localStorage.getItem('quota_display_type') === 'TOKENS';
// Get all columns for display in selector
const allColumns = getLogsColumns({
t,
@@ -41,6 +47,7 @@ const ColumnSelectorModal = ({
copyText,
showUserInfoFunc,
isAdminUser,
billingDisplayMode,
});
return (
@@ -61,6 +68,21 @@ const ColumnSelectorModal = ({
}
>
<div style={{ marginBottom: 20 }}>
<div style={{ marginBottom: 16 }}>
<div style={{ marginBottom: 8, fontWeight: 600 }}>{t('计费显示模式')}</div>
<RadioGroup
type='button'
value={billingDisplayMode}
onChange={(value) => setBillingDisplayMode(value)}
>
<Radio value='price'>
{isTokensDisplay ? t('价格模式') : t('价格模式(默认)')}
</Radio>
<Radio value='ratio'>
{isTokensDisplay ? t('倍率模式(默认)') : t('倍率模式')}
</Radio>
</RadioGroup>
</div>
<Checkbox
checked={Object.values(visibleColumns).every((v) => v === true)}
indeterminate={

View File

@@ -191,4 +191,9 @@ export const CHANNEL_OPTIONS = [
},
];
// Channel types that support upstream model list fetching in UI.
export const MODEL_FETCHABLE_CHANNEL_TYPES = new Set([
1, 4, 14, 34, 17, 26, 27, 24, 47, 25, 20, 23, 31, 40, 42, 48, 43,
]);
export const MODEL_TABLE_PAGE_SIZE = 10;

View File

@@ -20,6 +20,7 @@ For commercial licensing, please contact support@quantumnous.com
import React, { useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { reducer, initialState } from './reducer';
import { normalizeLanguage } from '../../i18n/language';
export const UserContext = React.createContext({
state: initialState,
@@ -35,8 +36,12 @@ export const UserProvider = ({ children }) => {
if (state.user?.setting) {
try {
const settings = JSON.parse(state.user.setting);
if (settings.language && settings.language !== i18n.language) {
i18n.changeLanguage(settings.language);
const normalizedLanguage = normalizeLanguage(settings.language);
if (normalizedLanguage && normalizedLanguage !== i18n.language) {
i18n.changeLanguage(normalizedLanguage);
}
if (normalizedLanguage) {
localStorage.setItem('i18nextLng', normalizedLanguage);
}
} catch (e) {
// Ignore parse errors

Some files were not shown because too many files have changed in this diff Show More