Merge pull request #2837 from seefs001/fix/chat2responses_reasoning

fix: map Responses reasoning stream to chat completion deltas
This commit is contained in:
Calcium-Ion
2026-02-05 01:56:12 +08:00
committed by GitHub
4 changed files with 128 additions and 4 deletions

View File

@@ -352,6 +352,11 @@ type ResponsesOutputContent struct {
Annotations []interface{} `json:"annotations"`
}
// ResponsesReasoningSummaryPart is one part of a reasoning summary in the
// Responses API stream (e.g. a "summary_text" part), carrying the part's
// type discriminator and its accumulated text.
type ResponsesReasoningSummaryPart struct {
Type string `json:"type"`
Text string `json:"text"`
}
const (
BuildInToolWebSearchPreview = "web_search_preview"
BuildInToolFileSearch = "file_search"
@@ -374,8 +379,11 @@ type ResponsesStreamResponse struct {
Item *ResponsesOutput `json:"item,omitempty"`
// - response.function_call_arguments.delta
// - response.function_call_arguments.done
OutputIndex *int `json:"output_index,omitempty"`
ItemID string `json:"item_id,omitempty"`
OutputIndex *int `json:"output_index,omitempty"`
ContentIndex *int `json:"content_index,omitempty"`
SummaryIndex *int `json:"summary_index,omitempty"`
ItemID string `json:"item_id,omitempty"`
Part *ResponsesReasoningSummaryPart `json:"part,omitempty"`
}
// GetOpenAIError 从动态错误类型中提取OpenAIError结构

View File

@@ -585,6 +585,9 @@ func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommo
}
request.Model = originModel
}
if info != nil && request.Reasoning != nil && request.Reasoning.Effort != "" {
info.ReasoningEffort = request.Reasoning.Effort
}
return request, nil
}

View File

@@ -18,6 +18,26 @@ import (
"github.com/gin-gonic/gin"
)
// responsesStreamIndexKey derives a map key identifying a stream item,
// optionally qualified by an index as "itemID:idx". An empty itemID yields
// an empty key; a nil index yields the bare itemID.
func responsesStreamIndexKey(itemID string, idx *int) string {
	switch {
	case itemID == "":
		return ""
	case idx == nil:
		return itemID
	default:
		return fmt.Sprintf("%s:%d", itemID, *idx)
	}
}
// stringDeltaFromPrefix returns the portion of next that extends prev.
// In the usual streaming case next is the accumulated text so far and prev
// is the previously seen accumulation, so only the newly appended suffix is
// returned; when prev is not a prefix of next, next is returned unchanged,
// and an empty next yields "".
//
// These are exactly the semantics of strings.TrimPrefix: it strips prev
// when it is a prefix (covering prev == "" and prev == next, which yield
// next and "" respectively) and returns next unchanged otherwise
// (covering next == "").
func stringDeltaFromPrefix(prev string, next string) string {
	return strings.TrimPrefix(next, prev)
}
func OaiResponsesToChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
if resp == nil || resp.Body == nil {
return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
@@ -86,6 +106,7 @@ func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo
toolCallArgsByID := make(map[string]string)
toolCallNameSent := make(map[string]bool)
toolCallCanonicalIDByItemID := make(map[string]string)
//reasoningSummaryTextByKey := make(map[string]string)
sendStartIfNeeded := func() bool {
if sentStart {
@@ -99,6 +120,66 @@ func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo
return true
}
//sendReasoningDelta := func(delta string) bool {
// if delta == "" {
// return true
// }
// if !sendStartIfNeeded() {
// return false
// }
//
// usageText.WriteString(delta)
// chunk := &dto.ChatCompletionsStreamResponse{
// Id: responseId,
// Object: "chat.completion.chunk",
// Created: createAt,
// Model: model,
// Choices: []dto.ChatCompletionsStreamResponseChoice{
// {
// Index: 0,
// Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
// ReasoningContent: &delta,
// },
// },
// },
// }
// if err := helper.ObjectData(c, chunk); err != nil {
// streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
// return false
// }
// return true
//}
// sendReasoningSummaryDelta forwards one reasoning-summary text delta to the
// client as a chat-completion chunk whose delta carries ReasoningContent
// (not Content). It returns false when the stream must abort — a write
// failure is recorded in streamErr before returning. An empty delta is a
// successful no-op.
sendReasoningSummaryDelta := func(delta string) bool {
if delta == "" {
return true
}
// Ensure the initial start chunk has been emitted before any content.
if !sendStartIfNeeded() {
return false
}
// Accumulate the streamed text into usageText — presumably used later for
// usage/token accounting; verify against the rest of the handler.
usageText.WriteString(delta)
chunk := &dto.ChatCompletionsStreamResponse{
Id: responseId,
Object: "chat.completion.chunk",
Created: createAt,
Model: model,
Choices: []dto.ChatCompletionsStreamResponseChoice{
{
Index: 0,
Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
// Reasoning summaries are mapped to reasoning_content in the
// chat-completions delta, keeping the main content stream clean.
ReasoningContent: &delta,
},
},
},
}
if err := helper.ObjectData(c, chunk); err != nil {
streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
return false
}
return true
}
sendToolCallDelta := func(callID string, name string, argsDelta string) bool {
if callID == "" {
return true
@@ -188,6 +269,37 @@ func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo
}
}
//case "response.reasoning_text.delta":
//if !sendReasoningDelta(streamResp.Delta) {
// return false
//}
//case "response.reasoning_text.done":
case "response.reasoning_summary_text.delta":
if !sendReasoningSummaryDelta(streamResp.Delta) {
return false
}
case "response.reasoning_summary_text.done":
//case "response.reasoning_summary_part.added", "response.reasoning_summary_part.done":
// key := responsesStreamIndexKey(strings.TrimSpace(streamResp.ItemID), streamResp.SummaryIndex)
// if key == "" || streamResp.Part == nil {
// break
// }
// // Only handle summary text parts, ignore other part types.
// if streamResp.Part.Type != "" && streamResp.Part.Type != "summary_text" {
// break
// }
// prev := reasoningSummaryTextByKey[key]
// next := streamResp.Part.Text
// delta := stringDeltaFromPrefix(prev, next)
// reasoningSummaryTextByKey[key] = next
// if !sendReasoningSummaryDelta(delta) {
// return false
// }
case "response.output_text.delta":
if !sendStartIfNeeded() {
return false

View File

@@ -346,9 +346,10 @@ func ChatCompletionsRequestToResponsesRequest(req *dto.GeneralOpenAIRequest) (*d
Metadata: req.Metadata,
}
if req.ReasoningEffort != "" && req.ReasoningEffort != "none" {
if req.ReasoningEffort != "" {
out.Reasoning = &dto.Reasoning{
Effort: req.ReasoningEffort,
Effort: req.ReasoningEffort,
Summary: "detailed",
}
}