feat: logs show reject reason

This commit is contained in:
Seefs
2026-01-25 14:52:18 +08:00
parent 68d9a227dd
commit 00c5d9ffdf
7 changed files with 33 additions and 0 deletions

View File

@@ -55,4 +55,8 @@ const (
ContextKeyLocalCountTokens ContextKey = "local_count_tokens"
ContextKeySystemPromptOverride ContextKey = "system_prompt_override"
// ContextKeyAdminRejectReason stores an admin-only reject/block reason extracted from upstream responses.
// It is not returned to end users, but can be persisted into consume/error logs for debugging.
ContextKeyAdminRejectReason ContextKey = "admin_reject_reason"
)

View File

@@ -59,6 +59,7 @@ func formatUserLogs(logs []*Log) {
// Remove admin-only debug fields.
delete(otherMap, "admin_info")
delete(otherMap, "request_conversion")
delete(otherMap, "reject_reason")
}
logs[i].Other = common.MapToJsonStr(otherMap)
logs[i].Id = logs[i].Id % 1024

View File

@@ -1,10 +1,12 @@
package gemini

import (
"fmt"
"io"
"net/http"
"github.com/QuantumNous/new-api/common"
"github.com/QuantumNous/new-api/constant"
"github.com/QuantumNous/new-api/dto"
"github.com/QuantumNous/new-api/logger"
relaycommon "github.com/QuantumNous/new-api/relay/common"
@@ -35,6 +37,10 @@ func GeminiTextGenerationHandler(c *gin.Context, info *relaycommon.RelayInfo, re
return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
}
if len(geminiResponse.Candidates) == 0 && geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
}
// 计算使用量(基于 UsageMetadata)
usage := dto.Usage{
PromptTokens: geminiResponse.UsageMetadata.PromptTokenCount,

View File

@@ -1197,6 +1197,10 @@ func geminiStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http
return false
}
if len(geminiResponse.Candidates) == 0 && geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
}
// 统计图片数量
for _, candidate := range geminiResponse.Candidates {
for _, part := range candidate.Content.Parts {
@@ -1372,12 +1376,14 @@ func GeminiChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.R
var newAPIError *types.NewAPIError
if geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
newAPIError = types.NewOpenAIError(
errors.New("request blocked by Gemini API: "+*geminiResponse.PromptFeedback.BlockReason),
types.ErrorCodePromptBlocked,
http.StatusBadRequest,
)
} else {
common.SetContextKey(c, constant.ContextKeyAdminRejectReason, "gemini_empty_candidates")
newAPIError = types.NewOpenAIError(
errors.New("empty response from Gemini API"),
types.ErrorCodeEmptyResponse,

View File

@@ -229,6 +229,13 @@ func OpenaiHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Respo
return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
}
for _, choice := range simpleResponse.Choices {
if choice.FinishReason == constant.FinishReasonContentFilter {
common.SetContextKey(c, constant.ContextKeyAdminRejectReason, "openai_finish_reason=content_filter")
break
}
}
forceFormat := false
if info.ChannelSetting.ForceFormat {
forceFormat = true

View File

@@ -237,6 +237,9 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, usage
}
extraContent = append(extraContent, "上游无计费信息")
}
adminRejectReason := common.GetContextKeyString(ctx, constant.ContextKeyAdminRejectReason)
useTimeSeconds := time.Now().Unix() - relayInfo.StartTime.Unix()
promptTokens := usage.PromptTokens
cacheTokens := usage.PromptTokensDetails.CachedTokens
@@ -461,6 +464,9 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, usage
}
logContent := strings.Join(extraContent, ", ")
other := service.GenerateTextOtherInfo(ctx, relayInfo, modelRatio, groupRatio, completionRatio, cacheTokens, cacheRatio, modelPrice, relayInfo.PriceData.GroupRatioInfo.GroupSpecialRatio)
if adminRejectReason != "" {
other["reject_reason"] = adminRejectReason
}
// For chat-based calls to the Claude model, tagging is required: the Claude-style and OpenAI-style log renderers handle input rendering differently.
if isClaudeUsageSemantic {
other["claude"] = true

View File

@@ -578,6 +578,9 @@ export const getLogsColumns = ({
other?.is_system_prompt_overwritten,
'openai',
);
if (isAdminUser && other?.reject_reason) {
content += `\nBlock reason: ${other.reject_reason}`;
}
return (
<Typography.Paragraph
ellipsis={{