Files
new-api/setting/model_setting/global.go
Seefs 62b796fa6a feat: /v1/chat/completion -> /v1/response (#2629)
* feat: /v1/chat/completion -> /v1/response
2026-01-11 21:38:07 +08:00

75 lines
1.9 KiB
Go

package model_setting
import (
"slices"
"strings"
"github.com/QuantumNous/new-api/setting/config"
)
// ChatCompletionsToResponsesPolicy controls the /v1/chat/completions ->
// /v1/responses conversion: whether it is on at all, and which channels
// and model patterns it applies to.
type ChatCompletionsToResponsesPolicy struct {
	Enabled       bool     `json:"enabled"`
	AllChannels   bool     `json:"all_channels"`
	ChannelIDs    []int    `json:"channel_ids,omitempty"`
	ModelPatterns []string `json:"model_patterns,omitempty"`
}

// IsChannelEnabled reports whether the conversion policy applies to the
// channel with the given ID. With AllChannels set, every channel matches;
// otherwise a zero channelID or an empty allow-list never matches.
func (p ChatCompletionsToResponsesPolicy) IsChannelEnabled(channelID int) bool {
	switch {
	case !p.Enabled:
		// Conversion is switched off globally.
		return false
	case p.AllChannels:
		// Applies everywhere; the allow-list is irrelevant.
		return true
	case channelID == 0, len(p.ChannelIDs) == 0:
		// No usable channel ID, or no allow-list to match against.
		return false
	default:
		return slices.Contains(p.ChannelIDs, channelID)
	}
}
// GlobalSettings holds the process-wide settings of this package. A single
// instance is registered with the global config manager under the key
// "global" and read through GetGlobalSettings.
type GlobalSettings struct {
	// PassThroughRequestEnabled toggles pass-through of requests.
	// NOTE(review): the consuming code is outside this file — confirm exact
	// pass-through semantics against its callers.
	PassThroughRequestEnabled bool `json:"pass_through_request_enabled"`
	// ThinkingModelBlacklist lists model names checked (whitespace-trimmed,
	// exact match) by ShouldPreserveThinkingSuffix.
	ThinkingModelBlacklist []string `json:"thinking_model_blacklist"`
	// ChatCompletionsToResponsesPolicy configures the
	// /v1/chat/completions -> /v1/responses conversion.
	ChatCompletionsToResponsesPolicy ChatCompletionsToResponsesPolicy `json:"chat_completions_to_responses_policy"`
}
// Default configuration: pass-through off, two kimi thinking models
// blacklisted, and chat->responses conversion disabled (but scoped to all
// channels if it is ever enabled without an explicit channel list).
var defaultOpenaiSettings = GlobalSettings{
	PassThroughRequestEnabled: false,
	ThinkingModelBlacklist: []string{
		"moonshotai/kimi-k2-thinking",
		"kimi-k2-thinking",
	},
	ChatCompletionsToResponsesPolicy: ChatCompletionsToResponsesPolicy{
		Enabled:     false,
		AllChannels: true,
	},
}
// Global instance, initialized from the defaults; the config manager may
// overwrite its fields after registration.
var globalSettings = defaultOpenaiSettings

func init() {
	// Register with the global config manager under the key "global" so
	// external configuration is loaded into globalSettings.
	config.GlobalConfig.Register("global", &globalSettings)
}
// GetGlobalSettings returns a pointer to the package-wide settings instance.
// Callers share (and may observe config-manager updates through) the same
// underlying struct; mutating it affects the whole process.
func GetGlobalSettings() *GlobalSettings {
	return &globalSettings
}
// ShouldPreserveThinkingSuffix reports whether the model is configured to
// keep its thinking/-nothinking/-low/-high/-medium suffix, i.e. whether
// modelName appears in the configured thinking-model blacklist. Comparison
// is exact after trimming surrounding whitespace on both sides.
func ShouldPreserveThinkingSuffix(modelName string) bool {
	want := strings.TrimSpace(modelName)
	if want == "" {
		return false
	}
	return slices.ContainsFunc(globalSettings.ThinkingModelBlacklist, func(listed string) bool {
		return strings.TrimSpace(listed) == want
	})
}