feat: /v1/chat/completion -> /v1/response (#2629)

* feat: /v1/chat/completion -> /v1/response
This commit is contained in:
Seefs
2026-01-11 21:38:07 +08:00
committed by GitHub
parent 1a5c8f3c35
commit 62b796fa6a
20 changed files with 1134 additions and 14 deletions

View File

@@ -0,0 +1,18 @@
package service
import (
"github.com/QuantumNous/new-api/dto"
"github.com/QuantumNous/new-api/service/openaicompat"
)
// ChatCompletionsRequestToResponsesRequest converts a Chat Completions
// request into a Responses API request. It is a thin re-export of the
// openaicompat implementation, kept so existing callers of the service
// package do not need to import openaicompat directly.
func ChatCompletionsRequestToResponsesRequest(req *dto.GeneralOpenAIRequest) (*dto.OpenAIResponsesRequest, error) {
	return openaicompat.ChatCompletionsRequestToResponsesRequest(req)
}
// ResponsesResponseToChatCompletionsResponse converts a Responses API
// response (with the given chat completion id) into a Chat Completions
// response plus its usage. Thin re-export of the openaicompat implementation.
func ResponsesResponseToChatCompletionsResponse(resp *dto.OpenAIResponsesResponse, id string) (*dto.OpenAITextResponse, *dto.Usage, error) {
	return openaicompat.ResponsesResponseToChatCompletionsResponse(resp, id)
}
// ExtractOutputTextFromResponses returns the concatenated text content of a
// Responses API response. Thin re-export of the openaicompat implementation.
func ExtractOutputTextFromResponses(resp *dto.OpenAIResponsesResponse) string {
	return openaicompat.ExtractOutputTextFromResponses(resp)
}

View File

@@ -0,0 +1,14 @@
package service
import (
"github.com/QuantumNous/new-api/service/openaicompat"
"github.com/QuantumNous/new-api/setting/model_setting"
)
// ShouldChatCompletionsUseResponsesPolicy reports whether a chat-completions
// request on the given channel/model should be routed through the Responses
// API under the supplied policy. Thin re-export of the openaicompat
// implementation for service-package callers.
func ShouldChatCompletionsUseResponsesPolicy(policy model_setting.ChatCompletionsToResponsesPolicy, channelID int, model string) bool {
	return openaicompat.ShouldChatCompletionsUseResponsesPolicy(policy, channelID, model)
}
// ShouldChatCompletionsUseResponsesGlobal is the global-settings variant of
// ShouldChatCompletionsUseResponsesPolicy. Thin re-export of the
// openaicompat implementation.
func ShouldChatCompletionsUseResponsesGlobal(channelID int, model string) bool {
	return openaicompat.ShouldChatCompletionsUseResponsesGlobal(channelID, model)
}

View File

@@ -0,0 +1,262 @@
package openaicompat
import (
"encoding/json"
"errors"
"fmt"
"strings"
"github.com/QuantumNous/new-api/common"
"github.com/QuantumNous/new-api/dto"
)
// normalizeChatImageURLToString flattens the various shapes a chat
// image_url part may arrive in — a plain string, a {"url": ...} map, or a
// dto.MessageImageUrl value/pointer — down to the bare URL string when one
// is present. Inputs carrying no usable URL are returned unchanged so the
// caller can forward them as-is.
func normalizeChatImageURLToString(v any) any {
	switch img := v.(type) {
	case string:
		return img
	case map[string]any:
		url := common.Interface2String(img["url"])
		if url == "" {
			return v
		}
		return url
	case dto.MessageImageUrl:
		if img.Url == "" {
			return v
		}
		return img.Url
	case *dto.MessageImageUrl:
		if img == nil || img.Url == "" {
			return v
		}
		return img.Url
	default:
		return v
	}
}
// ChatCompletionsRequestToResponsesRequest converts a Chat Completions
// request into an equivalent Responses API request.
//
// System and developer messages are folded into the request-level
// `instructions` field; all remaining messages become Responses input
// items. Tools, tool_choice, parallel_tool_calls, response_format,
// sampling parameters and reasoning effort are mapped where the two APIs
// overlap.
//
// Returns an error when req is nil, the model is empty, or n>1 (a single
// Responses call produces one output, so fan-out is unsupported here).
func ChatCompletionsRequestToResponsesRequest(req *dto.GeneralOpenAIRequest) (*dto.OpenAIResponsesRequest, error) {
	if req == nil {
		return nil, errors.New("request is nil")
	}
	if req.Model == "" {
		return nil, errors.New("model is required")
	}
	if req.N > 1 {
		return nil, fmt.Errorf("n>1 is not supported in responses compatibility mode")
	}
	var instructionsParts []string
	inputItems := make([]map[string]any, 0, len(req.Messages))
	for _, msg := range req.Messages {
		role := strings.TrimSpace(msg.Role)
		if role == "" {
			// Skip malformed messages without a role rather than failing
			// the whole conversion.
			continue
		}
		// Prefer mapping system/developer messages into `instructions`.
		if role == "system" || role == "developer" {
			if msg.Content == nil {
				continue
			}
			if msg.IsStringContent() {
				if s := strings.TrimSpace(msg.StringContent()); s != "" {
					instructionsParts = append(instructionsParts, s)
				}
				continue
			}
			// Multi-part system content: concatenate the text parts,
			// newline-separated. Non-text parts of a system message are
			// dropped here (instructions is text-only).
			parts := msg.ParseContent()
			var sb strings.Builder
			for _, part := range parts {
				if part.Type == dto.ContentTypeText && strings.TrimSpace(part.Text) != "" {
					if sb.Len() > 0 {
						sb.WriteString("\n")
					}
					sb.WriteString(part.Text)
				}
			}
			if s := strings.TrimSpace(sb.String()); s != "" {
				instructionsParts = append(instructionsParts, s)
			}
			continue
		}
		// Non-system message: becomes one Responses input item.
		item := map[string]any{
			"role": role,
		}
		if msg.Content == nil {
			item["content"] = ""
			inputItems = append(inputItems, item)
			continue
		}
		if msg.IsStringContent() {
			item["content"] = msg.StringContent()
			inputItems = append(inputItems, item)
			continue
		}
		// Multimodal content: translate each chat part into the
		// corresponding Responses "input_*" part type.
		parts := msg.ParseContent()
		contentParts := make([]map[string]any, 0, len(parts))
		for _, part := range parts {
			switch part.Type {
			case dto.ContentTypeText:
				contentParts = append(contentParts, map[string]any{
					"type": "input_text",
					"text": part.Text,
				})
			case dto.ContentTypeImageURL:
				// Responses input_image takes a bare URL string where chat
				// may have had an {"url": ...} object.
				contentParts = append(contentParts, map[string]any{
					"type":      "input_image",
					"image_url": normalizeChatImageURLToString(part.ImageUrl),
				})
			case dto.ContentTypeInputAudio:
				contentParts = append(contentParts, map[string]any{
					"type":        "input_audio",
					"input_audio": part.InputAudio,
				})
			case dto.ContentTypeFile:
				contentParts = append(contentParts, map[string]any{
					"type": "input_file",
					"file": part.File,
				})
			case dto.ContentTypeVideoUrl:
				contentParts = append(contentParts, map[string]any{
					"type":      "input_video",
					"video_url": part.VideoUrl,
				})
			default:
				// Best-effort: keep unknown parts as-is to avoid silently dropping context.
				contentParts = append(contentParts, map[string]any{
					"type": part.Type,
				})
			}
		}
		item["content"] = contentParts
		inputItems = append(inputItems, item)
	}
	inputRaw, err := common.Marshal(inputItems)
	if err != nil {
		return nil, err
	}
	var instructionsRaw json.RawMessage
	if len(instructionsParts) > 0 {
		// Multiple system/developer messages are joined with blank lines.
		instructions := strings.Join(instructionsParts, "\n\n")
		instructionsRaw, _ = common.Marshal(instructions)
	}
	var toolsRaw json.RawMessage
	if req.Tools != nil {
		tools := make([]map[string]any, 0, len(req.Tools))
		for _, tool := range req.Tools {
			switch tool.Type {
			case "function":
				// Responses flattens the chat tool's nested "function"
				// object onto the tool itself.
				tools = append(tools, map[string]any{
					"type":        "function",
					"name":        tool.Function.Name,
					"description": tool.Function.Description,
					"parameters":  tool.Function.Parameters,
				})
			default:
				// Best-effort: keep original tool shape for unknown types.
				var m map[string]any
				if b, err := common.Marshal(tool); err == nil {
					_ = common.Unmarshal(b, &m)
				}
				if len(m) == 0 {
					m = map[string]any{"type": tool.Type}
				}
				tools = append(tools, m)
			}
		}
		toolsRaw, _ = common.Marshal(tools)
	}
	var toolChoiceRaw json.RawMessage
	if req.ToolChoice != nil {
		switch v := req.ToolChoice.(type) {
		case string:
			// "auto" / "none" / "required" pass through unchanged.
			toolChoiceRaw, _ = common.Marshal(v)
		default:
			var m map[string]any
			if b, err := common.Marshal(v); err == nil {
				_ = common.Unmarshal(b, &m)
			}
			if m == nil {
				// Could not round-trip into a map; forward verbatim.
				toolChoiceRaw, _ = common.Marshal(v)
			} else if t, _ := m["type"].(string); t == "function" {
				// Chat: {"type":"function","function":{"name":"..."}}
				// Responses: {"type":"function","name":"..."}
				if name, ok := m["name"].(string); ok && name != "" {
					toolChoiceRaw, _ = common.Marshal(map[string]any{
						"type": "function",
						"name": name,
					})
				} else if fn, ok := m["function"].(map[string]any); ok {
					if name, ok := fn["name"].(string); ok && name != "" {
						toolChoiceRaw, _ = common.Marshal(map[string]any{
							"type": "function",
							"name": name,
						})
					} else {
						toolChoiceRaw, _ = common.Marshal(v)
					}
				} else {
					toolChoiceRaw, _ = common.Marshal(v)
				}
			} else {
				toolChoiceRaw, _ = common.Marshal(v)
			}
		}
	}
	var parallelToolCallsRaw json.RawMessage
	if req.ParallelTooCalls != nil {
		parallelToolCallsRaw, _ = common.Marshal(*req.ParallelTooCalls)
	}
	var textRaw json.RawMessage
	if req.ResponseFormat != nil && req.ResponseFormat.Type != "" {
		// NOTE(review): the whole chat response_format object is nested
		// under text.format; for json_schema the Responses API expects the
		// schema fields flattened onto the format object — confirm the
		// dto.ResponseFormat shape matches what upstream accepts.
		textRaw, _ = common.Marshal(map[string]any{
			"format": req.ResponseFormat,
		})
	}
	// Prefer max_completion_tokens when it is the larger of the two chat
	// token-limit fields.
	maxOutputTokens := req.MaxTokens
	if req.MaxCompletionTokens > maxOutputTokens {
		maxOutputTokens = req.MaxCompletionTokens
	}
	// A zero top_p is treated as "unset" and omitted.
	var topP *float64
	if req.TopP != 0 {
		topP = common.GetPointer(req.TopP)
	}
	out := &dto.OpenAIResponsesRequest{
		Model:             req.Model,
		Input:             inputRaw,
		Instructions:      instructionsRaw,
		MaxOutputTokens:   maxOutputTokens,
		Stream:            req.Stream,
		Temperature:       req.Temperature,
		Text:              textRaw,
		ToolChoice:        toolChoiceRaw,
		Tools:             toolsRaw,
		TopP:              topP,
		User:              req.User,
		ParallelToolCalls: parallelToolCallsRaw,
		Store:             req.Store,
		Metadata:          req.Metadata,
	}
	// "none" disables reasoning, so only forward a real effort level.
	if req.ReasoningEffort != "" && req.ReasoningEffort != "none" {
		out.Reasoning = &dto.Reasoning{
			Effort: req.ReasoningEffort,
		}
	}
	return out, nil
}

View File

@@ -0,0 +1,18 @@
package openaicompat
import "github.com/QuantumNous/new-api/setting/model_setting"
// ShouldChatCompletionsUseResponsesPolicy reports whether a chat-completions
// request for the given channel and model should be converted to the
// Responses API under the supplied policy: the channel must be enabled and
// the model must match at least one configured pattern.
func ShouldChatCompletionsUseResponsesPolicy(policy model_setting.ChatCompletionsToResponsesPolicy, channelID int, model string) bool {
	if policy.IsChannelEnabled(channelID) {
		return matchAnyRegex(policy.ModelPatterns, model)
	}
	return false
}
// ShouldChatCompletionsUseResponsesGlobal applies the globally configured
// chat-completions→responses policy to the given channel and model.
func ShouldChatCompletionsUseResponsesGlobal(channelID int, model string) bool {
	return ShouldChatCompletionsUseResponsesPolicy(
		model_setting.GetGlobalSettings().ChatCompletionsToResponsesPolicy,
		channelID,
		model,
	)
}

View File

@@ -0,0 +1,33 @@
package openaicompat
import (
"regexp"
"sync"
)
// compiledRegexCache memoizes compiled expressions keyed by their pattern
// string so repeated matching on the hot path avoids recompilation.
var compiledRegexCache sync.Map // map[string]*regexp.Regexp

// matchAnyRegex reports whether s matches at least one of the given
// patterns. Empty patterns are skipped, and patterns that fail to compile
// are treated as non-matching so a bad configuration entry cannot break
// runtime traffic. An empty s or an empty pattern list never matches.
func matchAnyRegex(patterns []string, s string) bool {
	if s == "" || len(patterns) == 0 {
		return false
	}
	for _, p := range patterns {
		if p == "" {
			continue
		}
		cached, hit := compiledRegexCache.Load(p)
		if !hit {
			re, err := regexp.Compile(p)
			if err != nil {
				// Invalid pattern: skip instead of failing the request.
				continue
			}
			// LoadOrStore keeps a single canonical entry even if two
			// goroutines compile the same pattern concurrently.
			cached, _ = compiledRegexCache.LoadOrStore(p, re)
		}
		if cached.(*regexp.Regexp).MatchString(s) {
			return true
		}
	}
	return false
}

View File

@@ -0,0 +1,133 @@
package openaicompat
import (
"errors"
"strings"
"github.com/QuantumNous/new-api/dto"
)
// ResponsesResponseToChatCompletionsResponse converts a non-streaming
// Responses API result into a Chat Completions response body.
//
// Usage counters are mapped across (falling back to input+output when
// total_tokens is absent), assistant text is gathered via
// ExtractOutputTextFromResponses, and "function_call" output items are
// converted into chat tool calls. finish_reason is "tool_calls" when any
// tool call is present, otherwise "stop".
//
// The provided id becomes the chat completion id. Returns the converted
// response together with the accumulated usage, or an error when resp is
// nil.
func ResponsesResponseToChatCompletionsResponse(resp *dto.OpenAIResponsesResponse, id string) (*dto.OpenAITextResponse, *dto.Usage, error) {
	if resp == nil {
		return nil, nil, errors.New("response is nil")
	}
	text := ExtractOutputTextFromResponses(resp)
	usage := &dto.Usage{}
	if resp.Usage != nil {
		if resp.Usage.InputTokens != 0 {
			usage.PromptTokens = resp.Usage.InputTokens
			usage.InputTokens = resp.Usage.InputTokens
		}
		if resp.Usage.OutputTokens != 0 {
			usage.CompletionTokens = resp.Usage.OutputTokens
			usage.OutputTokens = resp.Usage.OutputTokens
		}
		if resp.Usage.TotalTokens != 0 {
			usage.TotalTokens = resp.Usage.TotalTokens
		} else {
			// Some upstreams omit total_tokens; derive it.
			usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
		}
		if resp.Usage.InputTokensDetails != nil {
			usage.PromptTokensDetails.CachedTokens = resp.Usage.InputTokensDetails.CachedTokens
			usage.PromptTokensDetails.ImageTokens = resp.Usage.InputTokensDetails.ImageTokens
			usage.PromptTokensDetails.AudioTokens = resp.Usage.InputTokensDetails.AudioTokens
		}
		if resp.Usage.CompletionTokenDetails.ReasoningTokens != 0 {
			usage.CompletionTokenDetails.ReasoningTokens = resp.Usage.CompletionTokenDetails.ReasoningTokens
		}
	}
	created := resp.CreatedAt
	// Collect function_call output items unconditionally: a Responses result
	// may contain both a message and function_call items, and the previous
	// behavior of only scanning when the text was empty silently dropped
	// those tool calls.
	var toolCalls []dto.ToolCallResponse
	for _, out := range resp.Output {
		if out.Type != "function_call" {
			continue
		}
		name := strings.TrimSpace(out.Name)
		if name == "" {
			// A call without a function name is unusable downstream.
			continue
		}
		callId := strings.TrimSpace(out.CallId)
		if callId == "" {
			// Fall back to the output item id when call_id is missing.
			callId = strings.TrimSpace(out.ID)
		}
		toolCalls = append(toolCalls, dto.ToolCallResponse{
			ID:   callId,
			Type: "function",
			Function: dto.FunctionResponse{
				Name:      name,
				Arguments: out.Arguments,
			},
		})
	}
	finishReason := "stop"
	if len(toolCalls) > 0 {
		finishReason = "tool_calls"
	}
	// Chat Completions permits content and tool_calls on the same assistant
	// message, so keep any text alongside the calls.
	msg := dto.Message{
		Role:    "assistant",
		Content: text,
	}
	if len(toolCalls) > 0 {
		msg.SetToolCalls(toolCalls)
	}
	out := &dto.OpenAITextResponse{
		Id:      id,
		Object:  "chat.completion",
		Created: created,
		Model:   resp.Model,
		Choices: []dto.OpenAITextResponseChoice{
			{
				Index:        0,
				Message:      msg,
				FinishReason: finishReason,
			},
		},
		Usage: *usage,
	}
	return out, usage, nil
}
// ExtractOutputTextFromResponses concatenates the textual payload of a
// Responses API result. It prefers "output_text" content inside assistant
// (or role-less) "message" items; when none of those carry text, it falls
// back to concatenating every non-empty content text across all output
// items. Returns "" for a nil response or one with no output.
func ExtractOutputTextFromResponses(resp *dto.OpenAIResponsesResponse) string {
	if resp == nil || len(resp.Output) == 0 {
		return ""
	}
	var b strings.Builder
	// First pass: assistant message outputs only.
	for _, item := range resp.Output {
		if item.Type != "message" {
			continue
		}
		if item.Role != "" && item.Role != "assistant" {
			continue
		}
		for _, part := range item.Content {
			if part.Type == "output_text" && part.Text != "" {
				b.WriteString(part.Text)
			}
		}
	}
	if b.Len() > 0 {
		return b.String()
	}
	// Fallback pass: take any text content from any output item.
	for _, item := range resp.Output {
		for _, part := range item.Content {
			if part.Text != "" {
				b.WriteString(part.Text)
			}
		}
	}
	return b.String()
}