* feat: codex channel
* feat: codex oauth flow
* feat: codex refresh cred
* feat: codex usage
* fix: codex err message detail
* fix: codex setting ui
* feat: codex refresh cred task
* fix: import err
* fix: codex store must be false
* fix: chat -> responses tool call
370 lines · 11 KiB · Go
package openai

import (
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"

	"github.com/gin-gonic/gin"
)

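// OaiResponsesToChatHandler converts a non-streaming Responses API payload into
// a Chat Completions response and returns the usage reported by the upstream,
// falling back to a local estimate when the upstream omits it.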
func OaiResponsesToChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}

	defer service.CloseResponseBodyGracefully(resp)

	var responsesResp dto.OpenAIResponsesResponse
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
	}

	if err := common.Unmarshal(body, &responsesResp); err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}

	if oaiError := responsesResp.GetOpenAIError(); oaiError != nil && oaiError.Type != "" {
		return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
	}

	chatId := helper.GetResponseID(c)
	chatResp, usage, err := service.ResponsesResponseToChatCompletionsResponse(&responsesResp, chatId)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}

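	// Upstream omitted usage; estimate tokens locally from the extracted output text.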
	if usage == nil || usage.TotalTokens == 0 {
		text := service.ExtractOutputTextFromResponses(&responsesResp)
		usage = service.ResponseText2Usage(c, text, info.UpstreamModelName, info.GetEstimatePromptTokens())
		chatResp.Usage = *usage
	}

	chatBody, err := common.Marshal(chatResp)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeJsonMarshalFailed, http.StatusInternalServerError)
	}

	service.IOCopyBytesGracefully(c, resp, chatBody)
	return usage, nil
}

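// OaiResponsesToChatStreamHandler replays a Responses API SSE stream as Chat
// Completions chunks, translating text deltas, tool calls, and usage events.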
func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}

	defer service.CloseResponseBodyGracefully(resp)

	responseId := helper.GetResponseID(c)
	createAt := time.Now().Unix()
	model := info.UpstreamModelName

	var (
		usage       = &dto.Usage{}
		outputText  strings.Builder
		usageText   strings.Builder
		sentStart   bool
		sentStop    bool
		sawToolCall bool
		streamErr   *types.NewAPIError
	)

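	// Tool-call bookkeeping, keyed by the canonical call ID: emitted chunk index,
	// function name, accumulated arguments, and whether the name has been sent.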
	toolCallIndexByID := make(map[string]int)
	toolCallNameByID := make(map[string]string)
	toolCallArgsByID := make(map[string]string)
	toolCallNameSent := make(map[string]bool)
	toolCallCanonicalIDByItemID := make(map[string]string)

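	// sendStartIfNeeded lazily emits the initial empty assistant chunk exactly once.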
	sendStartIfNeeded := func() bool {
		if sentStart {
			return true
		}
		if err := helper.ObjectData(c, helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		sentStart = true
		return true
	}

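	// sendToolCallDelta emits one tool-call chunk carrying an arguments delta,
	// attaching the function name only on the first chunk for that call ID.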
	sendToolCallDelta := func(callID string, name string, argsDelta string) bool {
		if callID == "" {
			return true
		}
		if outputText.Len() > 0 {
			// Prefer streaming assistant text over tool calls to match non-stream behavior.
			return true
		}
		if !sendStartIfNeeded() {
			return false
		}

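		// Assign a stable chunk index the first time this call ID is seen.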
		idx, ok := toolCallIndexByID[callID]
		if !ok {
			idx = len(toolCallIndexByID)
			toolCallIndexByID[callID] = idx
		}
		if name != "" {
			toolCallNameByID[callID] = name
		}
		if toolCallNameByID[callID] != "" {
			name = toolCallNameByID[callID]
		}

		tool := dto.ToolCallResponse{
			ID:   callID,
			Type: "function",
			Function: dto.FunctionResponse{
				Arguments: argsDelta,
			},
		}
		tool.SetIndex(idx)
		if name != "" && !toolCallNameSent[callID] {
			tool.Function.Name = name
			toolCallNameSent[callID] = true
		}

		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ToolCalls: []dto.ToolCallResponse{tool},
					},
				},
			},
		}
		if err := helper.ObjectData(c, chunk); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		sawToolCall = true

		// Include tool call data in the local builder for fallback token estimation.
		if tool.Function.Name != "" {
			usageText.WriteString(tool.Function.Name)
		}
		if argsDelta != "" {
			usageText.WriteString(argsDelta)
		}
		return true
	}

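	// Scan the upstream SSE stream and translate each Responses event in turn.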
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		if streamErr != nil {
			return false
		}

		var streamResp dto.ResponsesStreamResponse
		if err := common.UnmarshalJsonStr(data, &streamResp); err != nil {
			logger.LogError(c, "failed to unmarshal responses stream event: "+err.Error())
			return true
		}

		switch streamResp.Type {
		case "response.created":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
			}

case "response.output_text.delta":
|
|
if !sendStartIfNeeded() {
|
|
return false
|
|
}
|
|
|
|
if streamResp.Delta != "" {
|
|
outputText.WriteString(streamResp.Delta)
|
|
usageText.WriteString(streamResp.Delta)
|
|
delta := streamResp.Delta
|
|
chunk := &dto.ChatCompletionsStreamResponse{
|
|
Id: responseId,
|
|
Object: "chat.completion.chunk",
|
|
Created: createAt,
|
|
Model: model,
|
|
Choices: []dto.ChatCompletionsStreamResponseChoice{
|
|
{
|
|
Index: 0,
|
|
Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
|
|
Content: &delta,
|
|
},
|
|
},
|
|
},
|
|
}
|
|
if err := helper.ObjectData(c, chunk); err != nil {
|
|
streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
|
|
return false
|
|
}
|
|
}
|
|
|
|
case "response.output_item.added", "response.output_item.done":
|
|
if streamResp.Item == nil {
|
|
break
|
|
}
|
|
if streamResp.Item.Type != "function_call" {
|
|
break
|
|
}
|
|
|
|
itemID := strings.TrimSpace(streamResp.Item.ID)
|
|
callID := strings.TrimSpace(streamResp.Item.CallId)
|
|
if callID == "" {
|
|
callID = itemID
|
|
}
|
|
if itemID != "" && callID != "" {
|
|
toolCallCanonicalIDByItemID[itemID] = callID
|
|
}
|
|
name := strings.TrimSpace(streamResp.Item.Name)
|
|
if name != "" {
|
|
toolCallNameByID[callID] = name
|
|
}
|
|
|
|
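			// Item events carry a full argument snapshot; emit only the unseen
			// suffix so arguments already streamed as deltas are not duplicated.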
			newArgs := streamResp.Item.Arguments
			prevArgs := toolCallArgsByID[callID]
			argsDelta := ""
			if newArgs != "" {
				if strings.HasPrefix(newArgs, prevArgs) {
					argsDelta = newArgs[len(prevArgs):]
				} else {
					argsDelta = newArgs
				}
				toolCallArgsByID[callID] = newArgs
			}

			if !sendToolCallDelta(callID, name, argsDelta) {
				return false
			}

case "response.function_call_arguments.delta":
|
|
itemID := strings.TrimSpace(streamResp.ItemID)
|
|
callID := toolCallCanonicalIDByItemID[itemID]
|
|
if callID == "" {
|
|
callID = itemID
|
|
}
|
|
if callID == "" {
|
|
break
|
|
}
|
|
toolCallArgsByID[callID] += streamResp.Delta
|
|
if !sendToolCallDelta(callID, "", streamResp.Delta) {
|
|
return false
|
|
}
|
|
|
|
case "response.function_call_arguments.done":
|
|
|
|
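		// response.completed carries the authoritative usage totals; map them
		// onto the Chat Completions usage fields.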
case "response.completed":
|
|
if streamResp.Response != nil {
|
|
if streamResp.Response.Model != "" {
|
|
model = streamResp.Response.Model
|
|
}
|
|
if streamResp.Response.CreatedAt != 0 {
|
|
createAt = int64(streamResp.Response.CreatedAt)
|
|
}
|
|
if streamResp.Response.Usage != nil {
|
|
if streamResp.Response.Usage.InputTokens != 0 {
|
|
usage.PromptTokens = streamResp.Response.Usage.InputTokens
|
|
usage.InputTokens = streamResp.Response.Usage.InputTokens
|
|
}
|
|
if streamResp.Response.Usage.OutputTokens != 0 {
|
|
usage.CompletionTokens = streamResp.Response.Usage.OutputTokens
|
|
usage.OutputTokens = streamResp.Response.Usage.OutputTokens
|
|
}
|
|
if streamResp.Response.Usage.TotalTokens != 0 {
|
|
usage.TotalTokens = streamResp.Response.Usage.TotalTokens
|
|
} else {
|
|
usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
|
|
}
|
|
if streamResp.Response.Usage.InputTokensDetails != nil {
|
|
usage.PromptTokensDetails.CachedTokens = streamResp.Response.Usage.InputTokensDetails.CachedTokens
|
|
usage.PromptTokensDetails.ImageTokens = streamResp.Response.Usage.InputTokensDetails.ImageTokens
|
|
usage.PromptTokensDetails.AudioTokens = streamResp.Response.Usage.InputTokensDetails.AudioTokens
|
|
}
|
|
if streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens != 0 {
|
|
usage.CompletionTokenDetails.ReasoningTokens = streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens
|
|
}
|
|
}
|
|
}
|
|
|
|
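			// Close out the stream: emit the stop chunk with the proper finish reason.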
			if !sendStartIfNeeded() {
				return false
			}
			if !sentStop {
				finishReason := "stop"
				if sawToolCall && outputText.Len() == 0 {
					finishReason = "tool_calls"
				}
				stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
				if err := helper.ObjectData(c, stop); err != nil {
					streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
					return false
				}
				sentStop = true
			}

case "response.error", "response.failed":
|
|
if streamResp.Response != nil {
|
|
if oaiErr := streamResp.Response.GetOpenAIError(); oaiErr != nil && oaiErr.Type != "" {
|
|
streamErr = types.WithOpenAIError(*oaiErr, http.StatusInternalServerError)
|
|
return false
|
|
}
|
|
}
|
|
streamErr = types.NewOpenAIError(fmt.Errorf("responses stream error: %s", streamResp.Type), types.ErrorCodeBadResponse, http.StatusInternalServerError)
|
|
return false
|
|
|
|
default:
|
|
}
|
|
|
|
return true
|
|
})
|
|
|
|
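	// Post-stream cleanup: surface any stream error, then backfill usage and any
	// chunks (start/stop/usage) that were never emitted.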
	if streamErr != nil {
		return nil, streamErr
	}

	if usage.TotalTokens == 0 {
		usage = service.ResponseText2Usage(c, usageText.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
	}

	if !sentStart {
		if err := helper.ObjectData(c, helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	if !sentStop {
		finishReason := "stop"
		if sawToolCall && outputText.Len() == 0 {
			finishReason = "tool_calls"
		}
		stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
		if err := helper.ObjectData(c, stop); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	if info.ShouldIncludeUsage && usage != nil {
		if err := helper.ObjectData(c, helper.GenerateFinalUsageResponse(responseId, createAt, model, *usage)); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}

	helper.Done(c)
	return usage, nil
}