Files
new-api/setting/model_setting/global.go
Seefs d7d3a2f763 feat: channel affinity (#2669)
* feat: channel affinity

* feat: channel affinity -> model setting

* fix: channel affinity

* feat: channel affinity op

* feat: channel_type setting

* feat: clean

* feat: cache supports both memory and Redis.

* feat: Optimise ui/ux

* feat: Optimise ui/ux

* feat: Optimise codex usage ui/ux

* feat: Optimise ui/ux

* feat: Optimise ui/ux

* feat: Optimise ui/ux

* feat: If the affinitized channel fails and a retry succeeds on another channel, update the affinity to the successful channel
2026-01-26 19:57:41 +08:00

80 lines
2.1 KiB
Go

package model_setting
import (
"slices"
"strings"
"github.com/QuantumNous/new-api/setting/config"
)
// ChatCompletionsToResponsesPolicy controls for which channels an incoming
// chat-completions request should be converted to the Responses API.
type ChatCompletionsToResponsesPolicy struct {
	// Enabled is the master switch; when false the policy never matches.
	Enabled bool `json:"enabled"`
	// AllChannels, when true, enables the conversion for every channel
	// regardless of ChannelIDs/ChannelTypes.
	AllChannels bool `json:"all_channels"`
	// ChannelIDs is an allow-list of specific channel IDs.
	ChannelIDs []int `json:"channel_ids,omitempty"`
	// ChannelTypes is an allow-list of channel types.
	ChannelTypes []int `json:"channel_types,omitempty"`
	// ModelPatterns holds model-name patterns; matching is performed by the
	// caller, not by IsChannelEnabled.
	ModelPatterns []string `json:"model_patterns,omitempty"`
}

// IsChannelEnabled reports whether the policy applies to the channel
// identified by channelID / channelType. Order of checks: master switch,
// then the all-channels shortcut, then the ID allow-list, then the type
// allow-list. Non-positive IDs/types never match the allow-lists.
func (p ChatCompletionsToResponsesPolicy) IsChannelEnabled(channelID int, channelType int) bool {
	if !p.Enabled {
		return false
	}
	if p.AllChannels {
		return true
	}
	// slices.Contains on a nil/empty slice is false, so no explicit length
	// checks are needed. The "> 0" guards preserve the original behavior of
	// never matching non-positive IDs/types even if they appear in the lists.
	if channelID > 0 && slices.Contains(p.ChannelIDs, channelID) {
		return true
	}
	if channelType > 0 && slices.Contains(p.ChannelTypes, channelType) {
		return true
	}
	return false
}
// GlobalSettings holds the package-wide model settings that are registered
// with the global config manager under the "global" key.
type GlobalSettings struct {
// PassThroughRequestEnabled toggles forwarding the original request body unmodified.
PassThroughRequestEnabled bool `json:"pass_through_request_enabled"`
// ThinkingModelBlacklist lists model names whose thinking-related suffixes
// must be preserved; consulted by ShouldPreserveThinkingSuffix.
ThinkingModelBlacklist []string `json:"thinking_model_blacklist"`
// ChatCompletionsToResponsesPolicy configures chat-completions -> Responses conversion.
ChatCompletionsToResponsesPolicy ChatCompletionsToResponsesPolicy `json:"chat_completions_to_responses_policy"`
}
// Default configuration: pass-through disabled, conversion policy disabled
// (AllChannels is pre-set to true so enabling the policy applies everywhere
// unless narrowed), and a seed blacklist of known thinking models.
var defaultOpenaiSettings = GlobalSettings{
PassThroughRequestEnabled: false,
ThinkingModelBlacklist: []string{
"moonshotai/kimi-k2-thinking",
"kimi-k2-thinking",
},
ChatCompletionsToResponsesPolicy: ChatCompletionsToResponsesPolicy{
Enabled: false,
AllChannels: true,
},
}
// Global instance, initialized from the defaults; mutated in place when the
// config manager loads persisted settings.
var globalSettings = defaultOpenaiSettings
func init() {
// Register with the global configuration manager so persisted values are
// unmarshaled directly into globalSettings under the "global" section.
config.GlobalConfig.Register("global", &globalSettings)
}
// GetGlobalSettings returns a pointer to the live global settings instance.
// Callers share (and may observe updates to) the same underlying struct.
func GetGlobalSettings() *GlobalSettings {
return &globalSettings
}
// ShouldPreserveThinkingSuffix reports whether modelName is configured to keep
// its thinking-related suffix (-thinking/-nothinking/-low/-high/-medium)
// rather than having it stripped. A model qualifies when its trimmed name
// exactly matches an entry (also trimmed) of the global ThinkingModelBlacklist.
func ShouldPreserveThinkingSuffix(modelName string) bool {
	target := strings.TrimSpace(modelName)
	if target == "" {
		return false
	}
	// Blacklist entries may carry stray whitespace from user-edited config,
	// so trim each entry before comparing.
	return slices.ContainsFunc(globalSettings.ThinkingModelBlacklist, func(entry string) bool {
		return strings.TrimSpace(entry) == target
	})
}