mirror of
https://github.com/Wei-Shaw/claude-relay-service.git
synced 2026-01-22 16:43:35 +00:00
feat: 实现 Antigravity OAuth 账户支持与路径分流
This commit is contained in:
126
src/utils/anthropicRequestDump.js
Normal file
126
src/utils/anthropicRequestDump.js
Normal file
@@ -0,0 +1,126 @@
|
||||
const fs = require('fs/promises')
|
||||
const path = require('path')
|
||||
const logger = require('./logger')
|
||||
const { getProjectRoot } = require('./projectPaths')
|
||||
|
||||
// Env var that toggles request dumping ('1' or 'true', case-insensitive).
const REQUEST_DUMP_ENV = 'ANTHROPIC_DEBUG_REQUEST_DUMP'
// Env var overriding the per-record size cap (bytes).
const REQUEST_DUMP_MAX_BYTES_ENV = 'ANTHROPIC_DEBUG_REQUEST_DUMP_MAX_BYTES'
// JSONL file (one record per line) written under the project root.
const REQUEST_DUMP_FILENAME = 'anthropic-requests-dump.jsonl'

/**
 * Whether request dumping is enabled via ANTHROPIC_DEBUG_REQUEST_DUMP.
 * Accepts '1' or 'true' (case-insensitive, surrounding whitespace ignored).
 * @returns {boolean}
 */
function isEnabled() {
  const raw = process.env[REQUEST_DUMP_ENV]
  if (!raw) {
    return false
  }
  // Trim + lowercase so values like ' TRUE ' work, matching the
  // normalization used by the Antigravity upstream dump helper.
  const normalized = String(raw).trim().toLowerCase()
  return normalized === '1' || normalized === 'true'
}

/**
 * Maximum serialized record size in bytes.
 * Falls back to 2 MiB when the env var is missing or not a positive integer.
 * @returns {number}
 */
function getMaxBytes() {
  const DEFAULT_MAX = 2 * 1024 * 1024 // 2 MiB, previously duplicated inline
  const raw = process.env[REQUEST_DUMP_MAX_BYTES_ENV]
  if (!raw) {
    return DEFAULT_MAX
  }
  const parsed = Number.parseInt(raw, 10)
  if (!Number.isFinite(parsed) || parsed <= 0) {
    return DEFAULT_MAX
  }
  return parsed
}
|
||||
|
||||
/**
 * Mask a secret value for logging: values longer than 8 characters keep
 * their first and last 4 characters; shorter values are fully replaced.
 * null/undefined pass through untouched.
 */
function maskSecret(value) {
  if (value === null || value === undefined) {
    return value
  }
  const str = String(value)
  return str.length <= 8 ? '***' : `${str.slice(0, 4)}...${str.slice(-4)}`
}

/**
 * Lower-case all header names and mask the values of well-known
 * credential/PII-bearing headers before they are written to the dump file.
 */
function sanitizeHeaders(headers) {
  const SENSITIVE = new Set([
    'authorization',
    'proxy-authorization',
    'x-api-key',
    'cookie',
    'set-cookie',
    'x-forwarded-for',
    'x-real-ip'
  ])

  const result = {}
  Object.entries(headers || {}).forEach(([name, value]) => {
    const lower = name.toLowerCase()
    result[lower] = SENSITIVE.has(lower) ? maskSecret(value) : value
  })
  return result
}
|
||||
|
||||
/**
 * JSON-encode a dump record, guarding against both stringify failures
 * (circular refs, BigInt, ...) and oversized output. Oversized payloads
 * are replaced by a wrapper record carrying a UTF-8 truncated prefix.
 */
function safeJsonStringify(payload, maxBytes) {
  let json
  try {
    json = JSON.stringify(payload)
  } catch (e) {
    return JSON.stringify({
      type: 'anthropic_request_dump_error',
      error: 'JSON.stringify_failed',
      message: e?.message || String(e)
    })
  }

  const byteLength = Buffer.byteLength(json, 'utf8')
  if (byteLength <= maxBytes) {
    return json
  }

  // Byte-accurate cut; a multi-byte character may be clipped at the tail.
  const partialJson = Buffer.from(json, 'utf8').subarray(0, maxBytes).toString('utf8')
  return JSON.stringify({
    type: 'anthropic_request_dump_truncated',
    maxBytes,
    originalBytes: byteLength,
    partialJson
  })
}
|
||||
|
||||
/**
 * Append a sanitized snapshot of an incoming Anthropic /v1/messages request
 * to a JSONL dump file under the project root. No-op unless the
 * ANTHROPIC_DEBUG_REQUEST_DUMP env flag is enabled.
 *
 * @param {object} req - Express-style request (headers/body/ip/requestId).
 * @param {object} [meta] - Extra caller-provided context stored verbatim.
 * @returns {Promise<void>} Never rejects; write failures are only logged.
 */
async function dumpAnthropicMessagesRequest(req, meta = {}) {
  if (!isEnabled()) {
    return
  }

  const maxBytes = getMaxBytes()
  const filename = path.join(getProjectRoot(), REQUEST_DUMP_FILENAME)

  const record = {
    ts: new Date().toISOString(),
    requestId: req?.requestId || null,
    method: req?.method || null,
    // Prefer originalUrl so mounted-router prefixes are preserved.
    url: req?.originalUrl || req?.url || null,
    ip: req?.ip || null,
    meta,
    // Secrets (authorization, cookies, ...) are masked before writing.
    headers: sanitizeHeaders(req?.headers || {}),
    body: req?.body || null
  }

  // One JSON record per line (JSONL), size-capped by safeJsonStringify.
  const line = `${safeJsonStringify(record, maxBytes)}\n`

  try {
    await fs.appendFile(filename, line, { encoding: 'utf8' })
  } catch (e) {
    // Best-effort debug tooling: never fail the request path over a dump error.
    logger.warn('Failed to dump Anthropic request', {
      filename,
      requestId: req?.requestId || null,
      error: e?.message || String(e)
    })
  }
}
|
||||
|
||||
// Public API plus the env-var names and dump filename used by this module.
module.exports = {
  dumpAnthropicMessagesRequest,
  REQUEST_DUMP_ENV,
  REQUEST_DUMP_MAX_BYTES_ENV,
  REQUEST_DUMP_FILENAME
}
|
||||
125
src/utils/anthropicResponseDump.js
Normal file
125
src/utils/anthropicResponseDump.js
Normal file
@@ -0,0 +1,125 @@
|
||||
const fs = require('fs/promises')
|
||||
const path = require('path')
|
||||
const logger = require('./logger')
|
||||
const { getProjectRoot } = require('./projectPaths')
|
||||
|
||||
// Env var that toggles response dumping ('1' or 'true', case-insensitive).
const RESPONSE_DUMP_ENV = 'ANTHROPIC_DEBUG_RESPONSE_DUMP'
// Env var overriding the per-record size cap (bytes).
const RESPONSE_DUMP_MAX_BYTES_ENV = 'ANTHROPIC_DEBUG_RESPONSE_DUMP_MAX_BYTES'
// JSONL file (one record per line) written under the project root.
const RESPONSE_DUMP_FILENAME = 'anthropic-responses-dump.jsonl'

/**
 * Whether response dumping is enabled via ANTHROPIC_DEBUG_RESPONSE_DUMP.
 * Accepts '1' or 'true' (case-insensitive, surrounding whitespace ignored).
 * @returns {boolean}
 */
function isEnabled() {
  const raw = process.env[RESPONSE_DUMP_ENV]
  if (!raw) {
    return false
  }
  // Trim + lowercase so values like ' TRUE ' work, matching the
  // normalization used by the Antigravity upstream dump helper.
  const normalized = String(raw).trim().toLowerCase()
  return normalized === '1' || normalized === 'true'
}

/**
 * Maximum serialized record size in bytes.
 * Falls back to 2 MiB when the env var is missing or not a positive integer.
 * @returns {number}
 */
function getMaxBytes() {
  const DEFAULT_MAX = 2 * 1024 * 1024 // 2 MiB, previously duplicated inline
  const raw = process.env[RESPONSE_DUMP_MAX_BYTES_ENV]
  if (!raw) {
    return DEFAULT_MAX
  }
  const parsed = Number.parseInt(raw, 10)
  if (!Number.isFinite(parsed) || parsed <= 0) {
    return DEFAULT_MAX
  }
  return parsed
}
|
||||
|
||||
/**
 * Serialize a dump record defensively: stringify failures yield an error
 * record; oversized output is replaced by a truncation wrapper carrying a
 * UTF-8 prefix of the original JSON.
 */
function safeJsonStringify(payload, maxBytes) {
  let json
  try {
    json = JSON.stringify(payload)
  } catch (e) {
    return JSON.stringify({
      type: 'anthropic_response_dump_error',
      error: 'JSON.stringify_failed',
      message: e?.message || String(e)
    })
  }

  const total = Buffer.byteLength(json, 'utf8')
  if (total <= maxBytes) {
    return json
  }

  // Byte-accurate cut; a multi-byte character may be clipped at the end.
  const prefix = Buffer.from(json, 'utf8').subarray(0, maxBytes).toString('utf8')
  return JSON.stringify({
    type: 'anthropic_response_dump_truncated',
    maxBytes,
    originalBytes: total,
    partialJson: prefix
  })
}
|
||||
|
||||
/**
 * Build a compact, log-friendly summary of an Anthropic messages response:
 * ids/usage plus block types, tool names, and a bounded text preview.
 */
function summarizeAnthropicResponseBody(body) {
  const blocks = Array.isArray(body?.content) ? body.content : []

  const blockTypes = []
  const toolNames = []
  let joinedText = ''
  for (const block of blocks) {
    if (!block) {
      continue
    }
    if (block.type) {
      blockTypes.push(block.type)
    }
    if (block.type === 'tool_use' && block.name) {
      toolNames.push(block.name)
    }
    if (block.type === 'text' && typeof block.text === 'string') {
      joinedText += block.text
    }
  }

  return {
    id: body?.id || null,
    model: body?.model || null,
    stop_reason: body?.stop_reason || null,
    usage: body?.usage || null,
    content_blocks: blockTypes,
    tool_use_names: toolNames,
    // Cap the preview at 800 chars so summaries stay small.
    text_preview: joinedText ? joinedText.slice(0, 800) : ''
  }
}
|
||||
|
||||
/**
 * Append one response record to the JSONL dump file under the project root.
 * No-op unless the ANTHROPIC_DEBUG_RESPONSE_DUMP env flag is enabled.
 *
 * @param {object} req - Original request (used for requestId/url context).
 * @param {object} responseInfo - Kind-tagged payload built by the wrappers below.
 * @param {object} [meta] - Extra caller-provided context stored verbatim.
 * @returns {Promise<void>} Never rejects; write failures are only logged.
 */
async function dumpAnthropicResponse(req, responseInfo, meta = {}) {
  if (!isEnabled()) {
    return
  }

  const maxBytes = getMaxBytes()
  const filename = path.join(getProjectRoot(), RESPONSE_DUMP_FILENAME)

  const record = {
    ts: new Date().toISOString(),
    requestId: req?.requestId || null,
    // Prefer originalUrl so mounted-router prefixes are preserved.
    url: req?.originalUrl || req?.url || null,
    meta,
    response: responseInfo
  }

  // One JSON record per line (JSONL), size-capped by safeJsonStringify.
  const line = `${safeJsonStringify(record, maxBytes)}\n`
  try {
    await fs.appendFile(filename, line, { encoding: 'utf8' })
  } catch (e) {
    // Best-effort debug tooling: never fail the response path over a dump error.
    logger.warn('Failed to dump Anthropic response', {
      filename,
      requestId: req?.requestId || null,
      error: e?.message || String(e)
    })
  }
}
|
||||
|
||||
/**
 * Dump a complete (non-streaming) response: status code, full body, and a
 * compact summary of its content blocks.
 */
async function dumpAnthropicNonStreamResponse(req, statusCode, body, meta = {}) {
  return dumpAnthropicResponse(
    req,
    { kind: 'non-stream', statusCode, summary: summarizeAnthropicResponseBody(body), body },
    meta
  )
}

/**
 * Dump a caller-built summary of a completed streaming response.
 */
async function dumpAnthropicStreamSummary(req, summary, meta = {}) {
  return dumpAnthropicResponse(req, { kind: 'stream', summary }, meta)
}

/**
 * Dump an error that occurred while streaming a response.
 */
async function dumpAnthropicStreamError(req, error, meta = {}) {
  return dumpAnthropicResponse(req, { kind: 'stream-error', error }, meta)
}
|
||||
|
||||
// Public API plus the env-var names and dump filename used by this module.
module.exports = {
  dumpAnthropicNonStreamResponse,
  dumpAnthropicStreamSummary,
  dumpAnthropicStreamError,
  RESPONSE_DUMP_ENV,
  RESPONSE_DUMP_MAX_BYTES_ENV,
  RESPONSE_DUMP_FILENAME
}
|
||||
138
src/utils/antigravityModel.js
Normal file
138
src/utils/antigravityModel.js
Normal file
@@ -0,0 +1,138 @@
|
||||
// Fallback model used when a request does not specify one.
const DEFAULT_ANTIGRAVITY_MODEL = 'gemini-2.5-flash'

// Upstream (internal) model id -> public alias exposed to clients.
// NOTE(review): an empty-string alias appears to mean "no public alias";
// getAntigravityModelAlias returns '' for these — confirm against callers.
const UPSTREAM_TO_ALIAS = {
  'rev19-uic3-1p': 'gemini-2.5-computer-use-preview-10-2025',
  'gemini-3-pro-image': 'gemini-3-pro-image-preview',
  'gemini-3-pro-high': 'gemini-3-pro-preview',
  'gemini-3-flash': 'gemini-3-flash-preview',
  'claude-sonnet-4-5': 'gemini-claude-sonnet-4-5',
  'claude-sonnet-4-5-thinking': 'gemini-claude-sonnet-4-5-thinking',
  'claude-opus-4-5-thinking': 'gemini-claude-opus-4-5-thinking',
  chat_20706: '',
  chat_23310: '',
  'gemini-2.5-flash-thinking': '',
  'gemini-3-pro-low': '',
  'gemini-2.5-pro': ''
}

// Public alias -> upstream id; inverse of the non-empty entries above.
const ALIAS_TO_UPSTREAM = {
  'gemini-2.5-computer-use-preview-10-2025': 'rev19-uic3-1p',
  'gemini-3-pro-image-preview': 'gemini-3-pro-image',
  'gemini-3-pro-preview': 'gemini-3-pro-high',
  'gemini-3-flash-preview': 'gemini-3-flash',
  'gemini-claude-sonnet-4-5': 'claude-sonnet-4-5',
  'gemini-claude-sonnet-4-5-thinking': 'claude-sonnet-4-5-thinking',
  'gemini-claude-opus-4-5-thinking': 'claude-opus-4-5-thinking'
}

// Per-alias capability metadata.
// - thinking: budget-token bounds (min/max), whether a zero budget and a
//   dynamic budget are accepted, and (where present) named effort levels.
// - name: fully-qualified model resource name, when one exists.
// - maxCompletionTokens: output-token cap (Claude-backed aliases only).
const ANTIGRAVITY_MODEL_METADATA = {
  'gemini-2.5-flash': {
    thinking: { min: 0, max: 24576, zeroAllowed: true, dynamicAllowed: true },
    name: 'models/gemini-2.5-flash'
  },
  'gemini-2.5-flash-lite': {
    thinking: { min: 0, max: 24576, zeroAllowed: true, dynamicAllowed: true },
    name: 'models/gemini-2.5-flash-lite'
  },
  'gemini-2.5-computer-use-preview-10-2025': {
    // No thinking config for the computer-use preview.
    name: 'models/gemini-2.5-computer-use-preview-10-2025'
  },
  'gemini-3-pro-preview': {
    thinking: {
      min: 128,
      max: 32768,
      zeroAllowed: false,
      dynamicAllowed: true,
      levels: ['low', 'high']
    },
    name: 'models/gemini-3-pro-preview'
  },
  'gemini-3-pro-image-preview': {
    thinking: {
      min: 128,
      max: 32768,
      zeroAllowed: false,
      dynamicAllowed: true,
      levels: ['low', 'high']
    },
    name: 'models/gemini-3-pro-image-preview'
  },
  'gemini-3-flash-preview': {
    thinking: {
      min: 128,
      max: 32768,
      zeroAllowed: false,
      dynamicAllowed: true,
      levels: ['minimal', 'low', 'medium', 'high']
    },
    name: 'models/gemini-3-flash-preview'
  },
  'gemini-claude-sonnet-4-5-thinking': {
    thinking: { min: 1024, max: 200000, zeroAllowed: false, dynamicAllowed: true },
    maxCompletionTokens: 64000
  },
  'gemini-claude-opus-4-5-thinking': {
    thinking: { min: 1024, max: 200000, zeroAllowed: false, dynamicAllowed: true },
    maxCompletionTokens: 64000
  }
}
|
||||
|
||||
/**
 * Strip an optional 'models/' prefix from a model id, substituting the
 * default model for empty/absent input.
 */
function normalizeAntigravityModelInput(model, defaultModel = DEFAULT_ANTIGRAVITY_MODEL) {
  if (!model) {
    return defaultModel
  }
  const PREFIX = 'models/'
  if (model.startsWith(PREFIX)) {
    return model.slice(PREFIX.length)
  }
  return model
}
|
||||
|
||||
/**
 * Map an upstream Antigravity model id to its public alias. Ids without a
 * mapping pass through unchanged; ids mapped to '' have no public alias.
 */
function getAntigravityModelAlias(modelName) {
  const normalized = normalizeAntigravityModelInput(modelName)
  return Object.prototype.hasOwnProperty.call(UPSTREAM_TO_ALIAS, normalized)
    ? UPSTREAM_TO_ALIAS[normalized]
    : normalized
}
|
||||
|
||||
/**
 * Look up capability metadata for a model name (alias or upstream form).
 * For bare 'claude-*' ids, falls back to the 'gemini-claude-*' alias entry
 * and then its '-thinking' variant. Returns null when nothing matches.
 */
function getAntigravityModelMetadata(modelName) {
  const normalized = normalizeAntigravityModelInput(modelName)
  if (Object.prototype.hasOwnProperty.call(ANTIGRAVITY_MODEL_METADATA, normalized)) {
    return ANTIGRAVITY_MODEL_METADATA[normalized]
  }
  // Bare Claude ids: try the gemini-prefixed alias first, then its
  // thinking variant (the metadata table only has thinking entries).
  if (normalized.startsWith('claude-')) {
    const prefixed = `gemini-${normalized}`
    if (Object.prototype.hasOwnProperty.call(ANTIGRAVITY_MODEL_METADATA, prefixed)) {
      return ANTIGRAVITY_MODEL_METADATA[prefixed]
    }
    const thinkingAlias = `${prefixed}-thinking`
    if (Object.prototype.hasOwnProperty.call(ANTIGRAVITY_MODEL_METADATA, thinkingAlias)) {
      return ANTIGRAVITY_MODEL_METADATA[thinkingAlias]
    }
  }
  return null
}
|
||||
|
||||
/**
 * Map a public model name (alias or upstream form) to the id actually sent
 * upstream: resolve the alias table, strip the 'gemini-' prefix from
 * 'gemini-claude-*' aliases, then apply a small remap for models the
 * upstream serves under a different id.
 */
function mapAntigravityUpstreamModel(model) {
  const normalized = normalizeAntigravityModelInput(model)
  let upstream = Object.prototype.hasOwnProperty.call(ALIAS_TO_UPSTREAM, normalized)
    ? ALIAS_TO_UPSTREAM[normalized]
    : normalized

  if (upstream.startsWith('gemini-claude-')) {
    upstream = upstream.replace(/^gemini-/, '')
  }

  const mapping = {
    // Opus: upstream more commonly serves the thinking variant
    // (CLIProxyAPI handles it the same way).
    'claude-opus-4-5': 'claude-opus-4-5-thinking',
    // Gemini thinking-variant fallback.
    'gemini-2.5-flash-thinking': 'gemini-2.5-flash'
  }

  return mapping[upstream] || upstream
}
|
||||
|
||||
// Model-name normalization and mapping helpers for Antigravity accounts.
module.exports = {
  normalizeAntigravityModelInput,
  getAntigravityModelAlias,
  getAntigravityModelMetadata,
  mapAntigravityUpstreamModel
}
|
||||
121
src/utils/antigravityUpstreamDump.js
Normal file
121
src/utils/antigravityUpstreamDump.js
Normal file
@@ -0,0 +1,121 @@
|
||||
const fs = require('fs/promises')
|
||||
const path = require('path')
|
||||
const logger = require('./logger')
|
||||
const { getProjectRoot } = require('./projectPaths')
|
||||
|
||||
// Flag env var: '1' / 'true' (case-insensitive) enables upstream request dumping.
const UPSTREAM_REQUEST_DUMP_ENV = 'ANTIGRAVITY_DEBUG_UPSTREAM_REQUEST_DUMP'
// Optional per-record byte-cap override.
const UPSTREAM_REQUEST_DUMP_MAX_BYTES_ENV = 'ANTIGRAVITY_DEBUG_UPSTREAM_REQUEST_DUMP_MAX_BYTES'
// JSONL output file, created under the project root.
const UPSTREAM_REQUEST_DUMP_FILENAME = 'antigravity-upstream-requests-dump.jsonl'

/** True when the dump flag is '1' or 'true' (whitespace/case ignored). */
function isEnabled() {
  const raw = process.env[UPSTREAM_REQUEST_DUMP_ENV]
  if (!raw) {
    return false
  }
  return ['1', 'true'].includes(String(raw).trim().toLowerCase())
}

/** Per-record byte cap; defaults to 2 MiB on missing/invalid values. */
function getMaxBytes() {
  const fallback = 2 * 1024 * 1024
  const raw = process.env[UPSTREAM_REQUEST_DUMP_MAX_BYTES_ENV]
  if (!raw) {
    return fallback
  }
  const parsed = Number.parseInt(raw, 10)
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback
}
|
||||
|
||||
/**
 * Shorten a secret for logging: strings longer than 10 characters keep the
 * first 3 and last 4 characters; shorter values become '***'. Falsy inputs
 * are returned unchanged.
 */
function redact(value) {
  if (!value) {
    return value
  }
  const text = String(value)
  if (text.length <= 10) {
    return '***'
  }
  const head = text.slice(0, 3)
  const tail = text.slice(-4)
  return `${head}...${tail}`
}
|
||||
|
||||
/**
 * JSON-encode an upstream dump record. Stringify failures produce an
 * error record; output exceeding maxBytes is replaced by a truncation
 * wrapper with a UTF-8 prefix of the original JSON.
 */
function safeJsonStringify(payload, maxBytes) {
  let json
  try {
    json = JSON.stringify(payload)
  } catch (e) {
    return JSON.stringify({
      type: 'antigravity_upstream_dump_error',
      error: 'JSON.stringify_failed',
      message: e?.message || String(e)
    })
  }

  const size = Buffer.byteLength(json, 'utf8')
  if (size <= maxBytes) {
    return json
  }

  // Byte-accurate cut; a multi-byte character may be clipped at the end.
  const head = Buffer.from(json, 'utf8').subarray(0, maxBytes).toString('utf8')
  return JSON.stringify({
    type: 'antigravity_upstream_dump_truncated',
    maxBytes,
    originalBytes: size,
    partialJson: head
  })
}
|
||||
|
||||
/**
 * Append a sanitized snapshot of an outgoing Antigravity upstream request
 * to a JSONL dump file under the project root. No-op unless the
 * ANTIGRAVITY_DEBUG_UPSTREAM_REQUEST_DUMP env flag is enabled.
 *
 * Only a whitelisted subset of headers is recorded, and the bearer token
 * in Authorization is redacted before writing.
 *
 * @param {object} requestInfo - { requestId, model, stream, url, baseUrl,
 *   params, headers, envelope }.
 * @returns {Promise<void>} Never rejects; write failures are only logged.
 */
async function dumpAntigravityUpstreamRequest(requestInfo) {
  if (!isEnabled()) {
    return
  }

  const maxBytes = getMaxBytes()
  const filename = path.join(getProjectRoot(), UPSTREAM_REQUEST_DUMP_FILENAME)

  const record = {
    ts: new Date().toISOString(),
    type: 'antigravity_upstream_request',
    requestId: requestInfo?.requestId || null,
    model: requestInfo?.model || null,
    stream: Boolean(requestInfo?.stream),
    url: requestInfo?.url || null,
    baseUrl: requestInfo?.baseUrl || null,
    params: requestInfo?.params || null,
    headers: requestInfo?.headers
      ? {
          Host: requestInfo.headers.Host || requestInfo.headers.host || null,
          'User-Agent':
            requestInfo.headers['User-Agent'] || requestInfo.headers['user-agent'] || null,
          Authorization: (() => {
            const raw = requestInfo.headers.Authorization || requestInfo.headers.authorization
            if (!raw) {
              return null
            }
            const value = String(raw)
            // FIX: the pattern previously read /^Bearer\\s+(.+)$/i — '\\s'
            // matches a literal backslash + 's', so 'Bearer <token>' never
            // matched and the 'Bearer ' prefix leaked into the redacted
            // token. '\s' matches the whitespace separator as intended.
            const m = value.match(/^Bearer\s+(.+)$/i)
            const token = m ? m[1] : value
            return `Bearer ${redact(token)}`
          })()
        }
      : null,
    envelope: requestInfo?.envelope || null
  }

  // One JSON record per line (JSONL), size-capped by safeJsonStringify.
  const line = `${safeJsonStringify(record, maxBytes)}\n`
  try {
    await fs.appendFile(filename, line, { encoding: 'utf8' })
  } catch (e) {
    // Best-effort debug tooling: never propagate dump failures.
    logger.warn('Failed to dump Antigravity upstream request', {
      filename,
      requestId: requestInfo?.requestId || null,
      error: e?.message || String(e)
    })
  }
}
|
||||
|
||||
// Public API plus the env-var names and dump filename used by this module.
module.exports = {
  dumpAntigravityUpstreamRequest,
  UPSTREAM_REQUEST_DUMP_ENV,
  UPSTREAM_REQUEST_DUMP_MAX_BYTES_ENV,
  UPSTREAM_REQUEST_DUMP_FILENAME
}
|
||||
@@ -55,16 +55,69 @@ function sanitizeUpstreamError(errorData) {
|
||||
return errorData
|
||||
}
|
||||
|
||||
// 深拷贝避免修改原始对象
|
||||
const sanitized = JSON.parse(JSON.stringify(errorData))
|
||||
// AxiosError / Error:返回摘要,避免泄露请求体/headers/token 等敏感信息
|
||||
const looksLikeAxiosError =
|
||||
errorData.isAxiosError ||
|
||||
(errorData.name === 'AxiosError' && (errorData.config || errorData.response))
|
||||
const looksLikeError = errorData instanceof Error || typeof errorData.message === 'string'
|
||||
|
||||
if (looksLikeAxiosError || looksLikeError) {
|
||||
const statusCode = errorData.response?.status
|
||||
const upstreamBody = errorData.response?.data
|
||||
const upstreamMessage = sanitizeErrorMessage(extractErrorMessage(upstreamBody) || '')
|
||||
|
||||
return {
|
||||
name: errorData.name || 'Error',
|
||||
code: errorData.code,
|
||||
statusCode,
|
||||
message: sanitizeErrorMessage(errorData.message || ''),
|
||||
upstreamMessage: upstreamMessage || undefined,
|
||||
upstreamType: upstreamBody?.error?.type || upstreamBody?.error?.status || undefined
|
||||
}
|
||||
}
|
||||
|
||||
// 递归清理嵌套的错误对象
|
||||
const visited = new WeakSet()
|
||||
|
||||
const shouldRedactKey = (key) => {
|
||||
if (!key) {
|
||||
return false
|
||||
}
|
||||
const lowerKey = String(key).toLowerCase()
|
||||
return (
|
||||
lowerKey === 'authorization' ||
|
||||
lowerKey === 'cookie' ||
|
||||
lowerKey.includes('api_key') ||
|
||||
lowerKey.includes('apikey') ||
|
||||
lowerKey.includes('access_token') ||
|
||||
lowerKey.includes('refresh_token') ||
|
||||
lowerKey.endsWith('token') ||
|
||||
lowerKey.includes('secret') ||
|
||||
lowerKey.includes('password')
|
||||
)
|
||||
}
|
||||
|
||||
const sanitizeObject = (obj) => {
|
||||
if (!obj || typeof obj !== 'object') {
|
||||
return obj
|
||||
}
|
||||
|
||||
if (visited.has(obj)) {
|
||||
return '[Circular]'
|
||||
}
|
||||
visited.add(obj)
|
||||
|
||||
// 主动剔除常见“超大且敏感”的字段
|
||||
if (obj.config || obj.request || obj.response) {
|
||||
return '[Redacted]'
|
||||
}
|
||||
|
||||
for (const key in obj) {
|
||||
if (shouldRedactKey(key)) {
|
||||
obj[key] = '[REDACTED]'
|
||||
continue
|
||||
}
|
||||
|
||||
// 清理所有字符串字段,不仅仅是 message
|
||||
if (typeof obj[key] === 'string') {
|
||||
obj[key] = sanitizeErrorMessage(obj[key])
|
||||
@@ -76,7 +129,9 @@ function sanitizeUpstreamError(errorData) {
|
||||
return obj
|
||||
}
|
||||
|
||||
return sanitizeObject(sanitized)
|
||||
// 尽量不修改原对象:浅拷贝后递归清理
|
||||
const clone = Array.isArray(errorData) ? [...errorData] : { ...errorData }
|
||||
return sanitizeObject(clone)
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
265
src/utils/geminiSchemaCleaner.js
Normal file
265
src/utils/geminiSchemaCleaner.js
Normal file
@@ -0,0 +1,265 @@
|
||||
/**
 * Append a parenthesized hint to a description. Returns the description
 * unchanged when the hint is empty, and the bare hint when the
 * description is empty.
 */
function appendHint(description, hint) {
  if (!hint) {
    return description || ''
  }
  return description ? `${description} (${hint})` : hint
}
||||
|
||||
/**
 * Turn a JSON-Schema '$ref' into a human-readable hint using the last
 * path segment, e.g. '#/definitions/User' -> 'See: User'. Empty input or
 * a trailing slash yields ''.
 */
function getRefHint(refValue) {
  const ref = String(refValue || '')
  if (!ref) {
    return ''
  }
  const segments = ref.split('/')
  const name = segments[segments.length - 1]
  return name ? `See: ${name}` : ''
}
|
||||
|
||||
/**
 * Flatten a JSON-Schema 'type' (string or array form) into a single
 * primary type plus a textual hint describing discarded alternatives and
 * nullability.
 */
function normalizeType(typeValue) {
  if (typeof typeValue === 'string' && typeValue) {
    return { type: typeValue, hint: '' }
  }
  if (!Array.isArray(typeValue) || typeValue.length === 0) {
    return { type: '', hint: '' }
  }

  const names = []
  for (const entry of typeValue) {
    if (entry !== null && entry !== undefined) {
      const s = String(entry)
      if (s) {
        names.push(s)
      }
    }
  }
  const nullable = names.includes('null')
  const concrete = names.filter((name) => name !== 'null')

  const hints = []
  if (concrete.length > 1) {
    hints.push(`Accepts: ${concrete.join(' | ')}`)
  }
  if (nullable) {
    hints.push('nullable')
  }
  // Default to 'string' when only 'null' (or nothing usable) remains.
  return { type: concrete[0] || 'string', hint: hints.join('; ') }
}
|
||||
|
||||
// JSON-Schema constraint keywords not carried over into the Gemini schema;
// their scalar values are preserved as human-readable description hints
// by cleanJsonSchemaForGemini instead.
const CONSTRAINT_KEYS = [
  'minLength',
  'maxLength',
  'exclusiveMinimum',
  'exclusiveMaximum',
  'pattern',
  'minItems',
  'maxItems'
]
|
||||
|
||||
/**
 * Rank a schema alternative by structural richness: object (3) > array (2)
 * > other concrete type (1) > null/unknown (0). Used to pick the most
 * informative branch of anyOf/oneOf.
 */
function scoreSchema(schema) {
  if (!schema || typeof schema !== 'object') {
    return { score: 0, type: '' }
  }
  const declared = typeof schema.type === 'string' ? schema.type : ''
  const hasProps = schema.properties && typeof schema.properties === 'object'
  if (declared === 'object' || hasProps) {
    return { score: 3, type: declared || 'object' }
  }
  if (declared === 'array' || schema.items) {
    return { score: 2, type: declared || 'array' }
  }
  if (declared && declared !== 'null') {
    return { score: 1, type: declared }
  }
  return { score: 0, type: declared || 'null' }
}
|
||||
|
||||
/**
 * From a list of anyOf/oneOf alternatives, pick the highest-scoring schema
 * (first wins on ties) and collect the distinct type names seen, so the
 * discarded alternatives can be mentioned in a description hint.
 */
function pickBestFromAlternatives(alternatives) {
  let bestIndex = 0
  let bestScore = -1
  const seenTypes = []
  alternatives.forEach((alt, index) => {
    const { score, type } = scoreSchema(alt)
    if (type) {
      seenTypes.push(type)
    }
    if (score > bestScore) {
      bestScore = score
      bestIndex = index
    }
  })
  return { best: alternatives[bestIndex], types: [...new Set(seenTypes)].filter(Boolean) }
}
|
||||
|
||||
/**
 * Recursively convert an arbitrary JSON Schema into the restricted subset
 * accepted by Gemini/Antigravity function declarations.
 *
 * Unsupported constructs are not dropped silently where avoidable: $ref,
 * union types, and scalar constraints are folded into the description as
 * human-readable hints. The result always carries a 'type', and object
 * schemas always carry 'properties'.
 *
 * @param {*} schema - Candidate schema; any JSON value is tolerated.
 * @returns {object} Gemini-compatible schema.
 */
function cleanJsonSchemaForGemini(schema) {
  // Any non-object input degenerates to an empty object schema.
  if (schema === null || schema === undefined) {
    return { type: 'object', properties: {} }
  }
  if (typeof schema !== 'object') {
    return { type: 'object', properties: {} }
  }
  if (Array.isArray(schema)) {
    return { type: 'object', properties: {} }
  }

  // $ref: not supported by Gemini/Antigravity, converted to a hint.
  if (typeof schema.$ref === 'string' && schema.$ref) {
    return {
      type: 'object',
      description: appendHint(schema.description || '', getRefHint(schema.$ref)),
      properties: {}
    }
  }

  // anyOf / oneOf: keep the most likely schema, preserve type hints.
  const anyOf = Array.isArray(schema.anyOf) ? schema.anyOf : null
  const oneOf = Array.isArray(schema.oneOf) ? schema.oneOf : null
  const alts = anyOf && anyOf.length ? anyOf : oneOf && oneOf.length ? oneOf : null
  if (alts) {
    const { best, types } = pickBestFromAlternatives(alts)
    const cleaned = cleanJsonSchemaForGemini(best)
    const mergedDescription = appendHint(cleaned.description || '', schema.description || '')
    const typeHint = types.length > 1 ? `Accepts: ${types.join(' || ')}` : ''
    return {
      ...cleaned,
      description: appendHint(mergedDescription, typeHint)
    }
  }

  // allOf: merge properties/required of all parts, then re-clean the merge.
  if (Array.isArray(schema.allOf) && schema.allOf.length) {
    const merged = {}
    let mergedDesc = schema.description || ''
    const mergedReq = new Set()
    const mergedProps = {}
    for (const item of schema.allOf) {
      const cleaned = cleanJsonSchemaForGemini(item)
      if (cleaned.description) {
        mergedDesc = appendHint(mergedDesc, cleaned.description)
      }
      if (Array.isArray(cleaned.required)) {
        for (const r of cleaned.required) {
          if (typeof r === 'string' && r) {
            mergedReq.add(r)
          }
        }
      }
      if (cleaned.properties && typeof cleaned.properties === 'object') {
        Object.assign(mergedProps, cleaned.properties)
      }
      // First part to declare type/items/enum wins.
      if (cleaned.type && !merged.type) {
        merged.type = cleaned.type
      }
      if (cleaned.items && !merged.items) {
        merged.items = cleaned.items
      }
      if (Array.isArray(cleaned.enum) && !merged.enum) {
        merged.enum = cleaned.enum
      }
    }
    if (Object.keys(mergedProps).length) {
      merged.type = merged.type || 'object'
      merged.properties = mergedProps
      // Keep only required names that survived the property merge.
      const req = Array.from(mergedReq).filter((r) => mergedProps[r])
      if (req.length) {
        merged.required = req
      }
    }
    if (mergedDesc) {
      merged.description = mergedDesc
    }
    return cleanJsonSchemaForGemini(merged)
  }

  const result = {}
  const constraintHints = []

  // description
  if (typeof schema.description === 'string') {
    result.description = schema.description
  }

  // Scalar constraints become description hints (objects are skipped).
  for (const key of CONSTRAINT_KEYS) {
    const value = schema[key]
    if (value === undefined || value === null || typeof value === 'object') {
      continue
    }
    constraintHints.push(`${key}: ${value}`)
  }

  // const -> single-value enum (unless an enum already exists).
  if (schema.const !== undefined && !Array.isArray(schema.enum)) {
    result.enum = [schema.const]
  }

  // enum: keep only primitive members.
  if (Array.isArray(schema.enum)) {
    const en = schema.enum.filter(
      (v) => typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean'
    )
    if (en.length) {
      result.enum = en
    }
  }

  // type (flatten array-form type).
  const { type: normalizedType, hint: typeHint } = normalizeType(schema.type)
  if (normalizedType) {
    result.type = normalizedType
  }
  if (typeHint) {
    result.description = appendHint(result.description || '', typeHint)
  }

  // Small enums get an 'Allowed: ...' hint for readability.
  if (result.enum && result.enum.length > 1 && result.enum.length <= 10) {
    const list = result.enum.map((item) => String(item)).join(', ')
    result.description = appendHint(result.description || '', `Allowed: ${list}`)
  }

  if (constraintHints.length) {
    result.description = appendHint(result.description || '', constraintHints.join(', '))
  }

  // additionalProperties: Gemini/Antigravity rejects booleans; drop the
  // field and record the intent as a hint.
  if (schema.additionalProperties === false) {
    result.description = appendHint(result.description || '', 'No extra properties allowed')
  }

  // properties (recursively cleaned)
  if (
    schema.properties &&
    typeof schema.properties === 'object' &&
    !Array.isArray(schema.properties)
  ) {
    const props = {}
    for (const [name, propSchema] of Object.entries(schema.properties)) {
      props[name] = cleanJsonSchemaForGemini(propSchema)
    }
    result.type = result.type || 'object'
    result.properties = props
  }

  // items (recursively cleaned)
  if (schema.items !== undefined) {
    result.type = result.type || 'array'
    result.items = cleanJsonSchemaForGemini(schema.items)
  }

  // required (filtered last, against the cleaned properties)
  if (Array.isArray(schema.required) && result.properties) {
    const req = schema.required.filter(
      (r) =>
        typeof r === 'string' && r && Object.prototype.hasOwnProperty.call(result.properties, r)
    )
    if (req.length) {
      result.required = req
    }
  }

  // Only Gemini-compatible fields are kept: everything else
  // ($schema/$id/$defs/definitions/format/constraints/pattern...) is dropped.

  if (!result.type) {
    result.type = result.properties ? 'object' : result.items ? 'array' : 'object'
  }
  if (result.type === 'object' && !result.properties) {
    result.properties = {}
  }
  return result
}
|
||||
|
||||
// Single entry point: JSON Schema -> Gemini-compatible schema conversion.
module.exports = {
  cleanJsonSchemaForGemini
}
|
||||
@@ -5,6 +5,10 @@
|
||||
* Supports parsing model strings like "ccr,model_name" to extract vendor type and base model.
|
||||
*/
|
||||
|
||||
// 仅保留原仓库既有的模型前缀:CCR 路由
|
||||
// Gemini/Antigravity 采用“路径分流”,避免在 model 字段里混入 vendor 前缀造成混乱
|
||||
const SUPPORTED_VENDOR_PREFIXES = ['ccr']
|
||||
|
||||
/**
|
||||
* Parse vendor-prefixed model string
|
||||
* @param {string} modelStr - Model string, potentially with vendor prefix (e.g., "ccr,gemini-2.5-pro")
|
||||
@@ -19,16 +23,21 @@ function parseVendorPrefixedModel(modelStr) {
|
||||
const trimmed = modelStr.trim()
|
||||
const lowerTrimmed = trimmed.toLowerCase()
|
||||
|
||||
// Check for ccr prefix (case insensitive)
|
||||
if (lowerTrimmed.startsWith('ccr,')) {
|
||||
for (const vendorPrefix of SUPPORTED_VENDOR_PREFIXES) {
|
||||
if (!lowerTrimmed.startsWith(`${vendorPrefix},`)) {
|
||||
continue
|
||||
}
|
||||
|
||||
const parts = trimmed.split(',')
|
||||
if (parts.length >= 2) {
|
||||
// Extract base model (everything after the first comma, rejoined in case model name contains commas)
|
||||
const baseModel = parts.slice(1).join(',').trim()
|
||||
return {
|
||||
vendor: 'ccr',
|
||||
baseModel
|
||||
}
|
||||
if (parts.length < 2) {
|
||||
break
|
||||
}
|
||||
|
||||
// Extract base model (everything after the first comma, rejoined in case model name contains commas)
|
||||
const baseModel = parts.slice(1).join(',').trim()
|
||||
return {
|
||||
vendor: vendorPrefix,
|
||||
baseModel
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
10
src/utils/projectPaths.js
Normal file
10
src/utils/projectPaths.js
Normal file
@@ -0,0 +1,10 @@
|
||||
const path = require('path')

// This file lives in src/utils, so the project root is two levels up.
/**
 * Absolute path of the repository/project root directory.
 * @returns {string}
 */
function getProjectRoot() {
  return path.resolve(__dirname, '..', '..')
}

module.exports = {
  getProjectRoot
}
|
||||
Reference in New Issue
Block a user