feat: add support for Azure OpenAI accounts

shaw
2025-08-23 10:30:29 +08:00
parent e4e72ba5e9
commit 74bcb99142
11 changed files with 1884 additions and 7 deletions

View File

@@ -20,6 +20,7 @@ class ApiKeyService {
claudeConsoleAccountId = null,
geminiAccountId = null,
openaiAccountId = null,
azureOpenaiAccountId = null,
bedrockAccountId = null, // Add Bedrock account ID support
permissions = 'all', // 'claude', 'gemini', 'openai', 'all'
isActive = true,
@@ -53,6 +54,7 @@ class ApiKeyService {
claudeConsoleAccountId: claudeConsoleAccountId || '',
geminiAccountId: geminiAccountId || '',
openaiAccountId: openaiAccountId || '',
azureOpenaiAccountId: azureOpenaiAccountId || '',
bedrockAccountId: bedrockAccountId || '', // Add Bedrock account ID
permissions: permissions || 'all',
enableModelRestriction: String(enableModelRestriction),
@@ -86,6 +88,7 @@ class ApiKeyService {
claudeConsoleAccountId: keyData.claudeConsoleAccountId,
geminiAccountId: keyData.geminiAccountId,
openaiAccountId: keyData.openaiAccountId,
azureOpenaiAccountId: keyData.azureOpenaiAccountId,
bedrockAccountId: keyData.bedrockAccountId, // Add Bedrock account ID
permissions: keyData.permissions,
enableModelRestriction: keyData.enableModelRestriction === 'true',
@@ -174,6 +177,7 @@ class ApiKeyService {
claudeConsoleAccountId: keyData.claudeConsoleAccountId,
geminiAccountId: keyData.geminiAccountId,
openaiAccountId: keyData.openaiAccountId,
azureOpenaiAccountId: keyData.azureOpenaiAccountId,
bedrockAccountId: keyData.bedrockAccountId, // Add Bedrock account ID
permissions: keyData.permissions || 'all',
tokenLimit: parseInt(keyData.tokenLimit),
@@ -308,6 +312,7 @@ class ApiKeyService {
'claudeConsoleAccountId',
'geminiAccountId',
'openaiAccountId',
'azureOpenaiAccountId',
'bedrockAccountId', // Add Bedrock account ID
'permissions',
'expiresAt',

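The diff above threads the new azureOpenaiAccountId field through API key creation, serialization, and the list of updatable fields. Below is a minimal caller sketch; the method name generateApiKey and the import path are assumptions for illustration, and only the azureOpenaiAccountId field itself comes from this commit.

// Hypothetical caller sketch - method name and path are assumed, not part of this diff.
const apiKeyService = require('./src/services/apiKeyService')

async function createAzureBoundKey(azureAccountId) {
  // Only the dedicated Azure OpenAI binding is set; the other account bindings stay empty.
  return apiKeyService.generateApiKey({
    name: 'azure-only-key',
    azureOpenaiAccountId: azureAccountId, // new field introduced by this commit
    permissions: 'openai', // 'claude', 'gemini', 'openai', 'all'
    isActive: true
  })
}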
View File

@@ -0,0 +1,479 @@
const redisClient = require('../models/redis')
const { v4: uuidv4 } = require('uuid')
const crypto = require('crypto')
const config = require('../../config/config')
const logger = require('../utils/logger')
// Encryption-related constants
const ALGORITHM = 'aes-256-cbc'
const IV_LENGTH = 16
// 🚀 Secure encryption key generation with dynamic salt support
const ENCRYPTION_SALT = config.security?.azureOpenaiSalt || 'azure-openai-account-default-salt'
class EncryptionKeyManager {
constructor() {
this.keyCache = new Map()
this.keyRotationInterval = 24 * 60 * 60 * 1000 // 24 hours
}
getKey(version = 'current') {
const cached = this.keyCache.get(version)
if (cached && Date.now() - cached.timestamp < this.keyRotationInterval) {
return cached.key
}
// Generate a new key
const key = crypto.scryptSync(config.security.encryptionKey, ENCRYPTION_SALT, 32)
this.keyCache.set(version, {
key,
timestamp: Date.now()
})
logger.debug('🔑 Azure OpenAI encryption key generated/refreshed')
return key
}
// Clean up expired keys
cleanup() {
const now = Date.now()
for (const [version, cached] of this.keyCache.entries()) {
if (now - cached.timestamp > this.keyRotationInterval) {
this.keyCache.delete(version)
}
}
}
}
const encryptionKeyManager = new EncryptionKeyManager()
// Periodically clean up expired keys
setInterval(
() => {
encryptionKeyManager.cleanup()
},
60 * 60 * 1000
) // clean up once per hour
// Generate the encryption key - uses the secure key manager
function generateEncryptionKey() {
return encryptionKeyManager.getKey()
}
// Azure OpenAI account key prefixes
const AZURE_OPENAI_ACCOUNT_KEY_PREFIX = 'azure_openai:account:'
const SHARED_AZURE_OPENAI_ACCOUNTS_KEY = 'shared_azure_openai_accounts'
const ACCOUNT_SESSION_MAPPING_PREFIX = 'azure_openai_session_account_mapping:'
// Encryption helper
function encrypt(text) {
if (!text) {
return ''
}
const key = generateEncryptionKey()
const iv = crypto.randomBytes(IV_LENGTH)
const cipher = crypto.createCipheriv(ALGORITHM, key, iv)
let encrypted = cipher.update(text)
encrypted = Buffer.concat([encrypted, cipher.final()])
return `${iv.toString('hex')}:${encrypted.toString('hex')}`
}
// Decryption helper - caching removed for better security
function decrypt(text) {
if (!text) {
return ''
}
try {
const key = generateEncryptionKey()
// The IV is a fixed 32 hex characters (16 bytes)
const ivHex = text.substring(0, 32)
const encryptedHex = text.substring(33) // skip the colon separator
if (ivHex.length !== 32 || !encryptedHex) {
throw new Error('Invalid encrypted text format')
}
const iv = Buffer.from(ivHex, 'hex')
const encryptedText = Buffer.from(encryptedHex, 'hex')
const decipher = crypto.createDecipheriv(ALGORITHM, key, iv)
let decrypted = decipher.update(encryptedText)
decrypted = Buffer.concat([decrypted, decipher.final()])
const result = decrypted.toString()
return result
} catch (error) {
logger.error('Azure OpenAI decryption error:', error.message)
return ''
}
}
// Create an account
async function createAccount(accountData) {
const accountId = uuidv4()
const now = new Date().toISOString()
const account = {
id: accountId,
name: accountData.name,
description: accountData.description || '',
accountType: accountData.accountType || 'shared',
groupId: accountData.groupId || null,
priority: accountData.priority || 50,
// Azure OpenAI-specific fields
azureEndpoint: accountData.azureEndpoint || '',
apiVersion: accountData.apiVersion || '2024-02-01', // use a stable API version
deploymentName: accountData.deploymentName || 'gpt-4', // default deployment name
apiKey: encrypt(accountData.apiKey || ''),
// Supported models
supportedModels: JSON.stringify(
accountData.supportedModels || ['gpt-4', 'gpt-4-turbo', 'gpt-35-turbo', 'gpt-35-turbo-16k']
),
// Status fields
isActive: accountData.isActive !== false ? 'true' : 'false',
status: 'active',
schedulable: accountData.schedulable !== false ? 'true' : 'false',
createdAt: now,
updatedAt: now
}
// Proxy configuration
if (accountData.proxy) {
account.proxy =
typeof accountData.proxy === 'string' ? accountData.proxy : JSON.stringify(accountData.proxy)
}
const client = redisClient.getClientSafe()
await client.hset(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, account)
// If this is a shared account, add it to the shared accounts set
if (account.accountType === 'shared') {
await client.sadd(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
}
logger.info(`Created Azure OpenAI account: ${accountId}`)
return account
}
// Get an account
async function getAccount(accountId) {
const client = redisClient.getClientSafe()
const accountData = await client.hgetall(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`)
if (!accountData || Object.keys(accountData).length === 0) {
return null
}
// Decrypt sensitive data (for internal use only; never returned to the frontend)
if (accountData.apiKey) {
accountData.apiKey = decrypt(accountData.apiKey)
}
// Parse the proxy configuration
if (accountData.proxy && typeof accountData.proxy === 'string') {
try {
accountData.proxy = JSON.parse(accountData.proxy)
} catch (e) {
accountData.proxy = null
}
}
// Parse the supported models
if (accountData.supportedModels && typeof accountData.supportedModels === 'string') {
try {
accountData.supportedModels = JSON.parse(accountData.supportedModels)
} catch (e) {
accountData.supportedModels = ['gpt-4', 'gpt-35-turbo']
}
}
return accountData
}
// Update an account
async function updateAccount(accountId, updates) {
const existingAccount = await getAccount(accountId)
if (!existingAccount) {
throw new Error('Account not found')
}
updates.updatedAt = new Date().toISOString()
// Encrypt sensitive data
if (updates.apiKey) {
updates.apiKey = encrypt(updates.apiKey)
}
// Handle the proxy configuration
if (updates.proxy) {
updates.proxy =
typeof updates.proxy === 'string' ? updates.proxy : JSON.stringify(updates.proxy)
}
// Handle the supported models
if (updates.supportedModels) {
updates.supportedModels =
typeof updates.supportedModels === 'string'
? updates.supportedModels
: JSON.stringify(updates.supportedModels)
}
// Keep the shared accounts set in sync when the account type changes
const client = redisClient.getClientSafe()
if (updates.accountType && updates.accountType !== existingAccount.accountType) {
if (updates.accountType === 'shared') {
await client.sadd(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
} else {
await client.srem(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
}
}
await client.hset(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, updates)
logger.info(`Updated Azure OpenAI account: ${accountId}`)
// Merge the updated account data
const updatedAccount = { ...existingAccount, ...updates }
// Parse the proxy configuration before returning
if (updatedAccount.proxy && typeof updatedAccount.proxy === 'string') {
try {
updatedAccount.proxy = JSON.parse(updatedAccount.proxy)
} catch (e) {
updatedAccount.proxy = null
}
}
return updatedAccount
}
// Delete an account
async function deleteAccount(accountId) {
const client = redisClient.getClientSafe()
const accountKey = `${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`
// Delete the account data from Redis
await client.del(accountKey)
// Remove it from the shared accounts set
await client.srem(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
logger.info(`Deleted Azure OpenAI account: ${accountId}`)
return true
}
// Get all accounts
async function getAllAccounts() {
const client = redisClient.getClientSafe()
const keys = await client.keys(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}*`)
if (!keys || keys.length === 0) {
return []
}
const accounts = []
for (const key of keys) {
const accountData = await client.hgetall(key)
if (accountData && Object.keys(accountData).length > 0) {
// Do not return sensitive data to the frontend
delete accountData.apiKey
// Parse the proxy configuration
if (accountData.proxy && typeof accountData.proxy === 'string') {
try {
accountData.proxy = JSON.parse(accountData.proxy)
} catch (e) {
accountData.proxy = null
}
}
// Parse the supported models
if (accountData.supportedModels && typeof accountData.supportedModels === 'string') {
try {
accountData.supportedModels = JSON.parse(accountData.supportedModels)
} catch (e) {
accountData.supportedModels = ['gpt-4', 'gpt-35-turbo']
}
}
accounts.push(accountData)
}
}
return accounts
}
// Get shared accounts
async function getSharedAccounts() {
const client = redisClient.getClientSafe()
const accountIds = await client.smembers(SHARED_AZURE_OPENAI_ACCOUNTS_KEY)
if (!accountIds || accountIds.length === 0) {
return []
}
const accounts = []
for (const accountId of accountIds) {
const account = await getAccount(accountId)
if (account && account.isActive === 'true') {
accounts.push(account)
}
}
return accounts
}
// Select an available account
async function selectAvailableAccount(sessionId = null) {
// If a session ID is provided, try to reuse the previously assigned account
if (sessionId) {
const client = redisClient.getClientSafe()
const mappingKey = `${ACCOUNT_SESSION_MAPPING_PREFIX}${sessionId}`
const accountId = await client.get(mappingKey)
if (accountId) {
const account = await getAccount(accountId)
if (account && account.isActive === 'true' && account.schedulable === 'true') {
logger.debug(`Reusing Azure OpenAI account ${accountId} for session ${sessionId}`)
return account
}
}
}
// Get all shared accounts
const sharedAccounts = await getSharedAccounts()
// Filter down to usable accounts
const availableAccounts = sharedAccounts.filter(
(acc) => acc.isActive === 'true' && acc.schedulable === 'true'
)
if (availableAccounts.length === 0) {
throw new Error('No available Azure OpenAI accounts')
}
// Sort by priority and pick the top account
availableAccounts.sort((a, b) => (b.priority || 50) - (a.priority || 50))
const selectedAccount = availableAccounts[0]
// If a session ID is provided, persist the session-to-account mapping
if (sessionId && selectedAccount) {
const client = redisClient.getClientSafe()
const mappingKey = `${ACCOUNT_SESSION_MAPPING_PREFIX}${sessionId}`
await client.setex(mappingKey, 3600, selectedAccount.id) // expires in 1 hour
}
logger.debug(`Selected Azure OpenAI account: ${selectedAccount.id}`)
return selectedAccount
}
// Update account usage
async function updateAccountUsage(accountId, tokens) {
const client = redisClient.getClientSafe()
const now = new Date().toISOString()
// Update usage with the atomic HINCRBY operation
await client.hincrby(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, 'totalTokensUsed', tokens)
await client.hset(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, 'lastUsedAt', now)
logger.debug(`Updated Azure OpenAI account ${accountId} usage: ${tokens} tokens`)
}
// Health-check a single account
async function healthCheckAccount(accountId) {
try {
const account = await getAccount(accountId)
if (!account) {
return { id: accountId, status: 'error', message: 'Account not found' }
}
// Simple check that the configuration is complete
if (!account.azureEndpoint || !account.apiKey || !account.deploymentName) {
return {
id: accountId,
status: 'error',
message: 'Incomplete configuration'
}
}
// An actual API call test could be added here
// For now, return a healthy status
return {
id: accountId,
status: 'healthy',
message: 'Account is configured correctly'
}
} catch (error) {
logger.error(`Health check failed for Azure OpenAI account ${accountId}:`, error)
return {
id: accountId,
status: 'error',
message: error.message
}
}
}
// Batch health checks
async function performHealthChecks() {
const accounts = await getAllAccounts()
const results = []
for (const account of accounts) {
const result = await healthCheckAccount(account.id)
results.push(result)
}
return results
}
// Toggle an account's schedulable status
async function toggleSchedulable(accountId) {
const account = await getAccount(accountId)
if (!account) {
throw new Error('Account not found')
}
const newSchedulable = account.schedulable === 'true' ? 'false' : 'true'
await updateAccount(accountId, { schedulable: newSchedulable })
return {
id: accountId,
schedulable: newSchedulable === 'true'
}
}
// Migrate API keys to support Azure OpenAI
async function migrateApiKeysForAzureSupport() {
const client = redisClient.getClientSafe()
const apiKeyIds = await client.smembers('api_keys')
let migratedCount = 0
for (const keyId of apiKeyIds) {
const keyData = await client.hgetall(`api_key:${keyId}`)
if (keyData && !keyData.azureOpenaiAccountId) {
// Add the Azure OpenAI account ID field, initially empty
await client.hset(`api_key:${keyId}`, 'azureOpenaiAccountId', '')
migratedCount++
}
}
logger.info(`Migrated ${migratedCount} API keys for Azure OpenAI support`)
return migratedCount
}
module.exports = {
createAccount,
getAccount,
updateAccount,
deleteAccount,
getAllAccounts,
getSharedAccounts,
selectAvailableAccount,
updateAccountUsage,
healthCheckAccount,
performHealthChecks,
toggleSchedulable,
migrateApiKeysForAzureSupport,
encrypt,
decrypt
}
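
For context, here is a minimal usage sketch of the account service above, assuming it is exported as src/services/azureOpenaiAccountService (the path is an assumption). Note that apiKey is encrypted with AES-256-CBC before being written to Redis and stored as "<32-hex IV>:<ciphertext hex>"; getAccount decrypts it for internal use, while getAllAccounts strips it entirely.

// A rough sketch under the assumptions above; the endpoint and key values are placeholders.
const azureOpenaiAccountService = require('./src/services/azureOpenaiAccountService')

async function demo() {
  const account = await azureOpenaiAccountService.createAccount({
    name: 'azure-eastus',
    azureEndpoint: 'https://my-resource.openai.azure.com', // placeholder endpoint
    apiVersion: '2024-02-01',
    deploymentName: 'gpt-4',
    apiKey: 'azure-api-key-value', // encrypted before it is written to Redis
    accountType: 'shared',
    priority: 60
  })

  // Reuse the same account for a session; the mapping is cached in Redis for 1 hour.
  const selected = await azureOpenaiAccountService.selectAvailableAccount('session-123')
  console.log(selected.id === account.id)
}

demo().catch(console.error)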

View File

@@ -0,0 +1,529 @@
const axios = require('axios')
const ProxyHelper = require('../utils/proxyHelper')
const logger = require('../utils/logger')
// Normalize the model name (strip the azure/ prefix)
function normalizeModelName(model) {
if (model && model.startsWith('azure/')) {
return model.replace('azure/', '')
}
return model
}
// Handle an Azure OpenAI request
async function handleAzureOpenAIRequest({
account,
requestBody,
headers: _headers = {}, // leading underscore marks the parameter as unused
isStream = false,
endpoint = 'chat/completions'
}) {
// Declare these at the top of the function so they are also accessible in the catch block
let requestUrl = ''
let proxyAgent = null
let deploymentName = ''
try {
// Build the Azure OpenAI request URL
const baseUrl = account.azureEndpoint
deploymentName = account.deploymentName || 'default'
// Azure Responses API requires preview versions; fall back appropriately
const apiVersion =
account.apiVersion || (endpoint === 'responses' ? '2024-10-01-preview' : '2024-02-01')
if (endpoint === 'chat/completions') {
requestUrl = `${baseUrl}/openai/deployments/${deploymentName}/chat/completions?api-version=${apiVersion}`
} else if (endpoint === 'responses') {
requestUrl = `${baseUrl}/openai/responses?api-version=${apiVersion}`
} else {
requestUrl = `${baseUrl}/openai/deployments/${deploymentName}/${endpoint}?api-version=${apiVersion}`
}
// Prepare request headers
const requestHeaders = {
'Content-Type': 'application/json',
'api-key': account.apiKey
}
// Remove headers that are not needed
delete requestHeaders['anthropic-version']
delete requestHeaders['x-api-key']
delete requestHeaders['host']
// Process the request body
const processedBody = { ...requestBody }
// Normalize the model name
if (processedBody.model) {
processedBody.model = normalizeModelName(processedBody.model)
} else {
processedBody.model = 'gpt-4'
}
// Use the shared proxy creation helper
proxyAgent = ProxyHelper.createProxyAgent(account.proxy)
// Configure request options
const axiosConfig = {
method: 'POST',
url: requestUrl,
headers: requestHeaders,
data: processedBody,
timeout: 600000, // 10 minutes for Azure OpenAI
validateStatus: () => true,
// Connection keep-alive options
keepAlive: true,
maxRedirects: 5,
// Guard against socket hang up
socketKeepAlive: true
}
// If a proxy is configured, attach it
if (proxyAgent) {
axiosConfig.httpsAgent = proxyAgent
// Add extra keep-alive settings for the proxy
if (proxyAgent.options) {
proxyAgent.options.keepAlive = true
proxyAgent.options.keepAliveMsecs = 1000
}
logger.debug(
`Using proxy for Azure OpenAI request: ${ProxyHelper.getProxyDescription(account.proxy)}`
)
}
// Special handling for streaming requests
if (isStream) {
axiosConfig.responseType = 'stream'
requestHeaders.accept = 'text/event-stream'
} else {
requestHeaders.accept = 'application/json'
}
logger.debug(`Making Azure OpenAI request`, {
requestUrl,
method: 'POST',
endpoint,
deploymentName,
apiVersion,
hasProxy: !!proxyAgent,
proxyInfo: ProxyHelper.maskProxyInfo(account.proxy),
isStream,
requestBodySize: JSON.stringify(processedBody).length
})
logger.debug('Azure OpenAI request headers', {
'content-type': requestHeaders['Content-Type'],
'user-agent': requestHeaders['user-agent'] || 'not-set',
customHeaders: Object.keys(requestHeaders).filter(
(key) => !['Content-Type', 'user-agent'].includes(key)
)
})
logger.debug('Azure OpenAI request body', {
model: processedBody.model,
messages: processedBody.messages?.length || 0,
otherParams: Object.keys(processedBody).filter((key) => !['model', 'messages'].includes(key))
})
const requestStartTime = Date.now()
logger.debug(`🔄 Starting Azure OpenAI HTTP request at ${new Date().toISOString()}`)
// Send the request
const response = await axios(axiosConfig)
const requestDuration = Date.now() - requestStartTime
logger.debug(`✅ Azure OpenAI HTTP request completed at ${new Date().toISOString()}`)
logger.debug(`Azure OpenAI response received`, {
status: response.status,
statusText: response.statusText,
duration: `${requestDuration}ms`,
responseHeaders: Object.keys(response.headers || {}),
hasData: !!response.data,
contentType: response.headers?.['content-type'] || 'unknown'
})
return response
} catch (error) {
const errorDetails = {
message: error.message,
code: error.code,
status: error.response?.status,
statusText: error.response?.statusText,
responseData: error.response?.data,
requestUrl: requestUrl || 'unknown',
endpoint,
deploymentName: deploymentName || account?.deploymentName || 'unknown',
hasProxy: !!proxyAgent,
proxyType: account?.proxy?.type || 'none',
isTimeout: error.code === 'ECONNABORTED',
isNetworkError: !error.response,
stack: error.stack
}
// Detailed logging for specific error types
if (error.code === 'ENOTFOUND') {
logger.error('DNS Resolution Failed for Azure OpenAI', {
...errorDetails,
hostname: requestUrl && requestUrl !== 'unknown' ? new URL(requestUrl).hostname : 'unknown',
suggestion: 'Check if Azure endpoint URL is correct and accessible'
})
} else if (error.code === 'ECONNREFUSED') {
logger.error('Connection Refused by Azure OpenAI', {
...errorDetails,
suggestion: 'Check if proxy settings are correct or Azure service is accessible'
})
} else if (error.code === 'ECONNRESET' || error.message.includes('socket hang up')) {
logger.error('🚨 Azure OpenAI Connection Reset / Socket Hang Up', {
...errorDetails,
suggestion:
'Connection was dropped by Azure OpenAI or proxy. This might be due to long request processing time, proxy timeout, or network instability. Try reducing request complexity or check proxy settings.'
})
} else if (error.code === 'ECONNABORTED' || error.code === 'ETIMEDOUT') {
logger.error('🚨 Azure OpenAI Request Timeout', {
...errorDetails,
timeoutMs: 600000,
suggestion:
'Request exceeded 10-minute timeout. Consider reducing model complexity or check if Azure service is responding slowly.'
})
} else if (
error.code === 'CERT_AUTHORITY_INVALID' ||
error.code === 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'
) {
logger.error('SSL Certificate Error for Azure OpenAI', {
...errorDetails,
suggestion: 'SSL certificate validation failed - check proxy SSL settings'
})
} else if (error.response?.status === 401) {
logger.error('Azure OpenAI Authentication Failed', {
...errorDetails,
suggestion: 'Check if Azure OpenAI API key is valid and not expired'
})
} else if (error.response?.status === 404) {
logger.error('Azure OpenAI Deployment Not Found', {
...errorDetails,
suggestion: 'Check if deployment name and Azure endpoint are correct'
})
} else {
logger.error('Azure OpenAI Request Failed', errorDetails)
}
throw error
}
}
// Safe stream manager
class StreamManager {
constructor() {
this.activeStreams = new Set()
this.cleanupCallbacks = new Map()
}
registerStream(streamId, cleanup) {
this.activeStreams.add(streamId)
this.cleanupCallbacks.set(streamId, cleanup)
}
cleanup(streamId) {
if (this.activeStreams.has(streamId)) {
try {
const cleanup = this.cleanupCallbacks.get(streamId)
if (cleanup) {
cleanup()
}
} catch (error) {
logger.warn(`Stream cleanup error for ${streamId}:`, error.message)
} finally {
this.activeStreams.delete(streamId)
this.cleanupCallbacks.delete(streamId)
}
}
}
getActiveStreamCount() {
return this.activeStreams.size
}
}
const streamManager = new StreamManager()
// SSE buffer size limits
const MAX_BUFFER_SIZE = 64 * 1024 // 64KB
const MAX_EVENT_SIZE = 16 * 1024 // 16KB maximum size of a single event
// Handle a streaming response
function handleStreamResponse(upstreamResponse, clientResponse, options = {}) {
const { onData, onEnd, onError } = options
const streamId = `stream_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
logger.info(`Starting Azure OpenAI stream handling`, {
streamId,
upstreamStatus: upstreamResponse.status,
upstreamHeaders: Object.keys(upstreamResponse.headers || {}),
clientRemoteAddress: clientResponse.req?.connection?.remoteAddress,
hasOnData: !!onData,
hasOnEnd: !!onEnd,
hasOnError: !!onError
})
return new Promise((resolve, reject) => {
let buffer = ''
let usageData = null
let actualModel = null
let hasEnded = false
let eventCount = 0
const maxEvents = 10000 // maximum number of events
// Set response headers
clientResponse.setHeader('Content-Type', 'text/event-stream')
clientResponse.setHeader('Cache-Control', 'no-cache')
clientResponse.setHeader('Connection', 'keep-alive')
clientResponse.setHeader('X-Accel-Buffering', 'no')
// Pass through selected headers
const passThroughHeaders = [
'x-request-id',
'x-ratelimit-remaining-requests',
'x-ratelimit-remaining-tokens'
]
passThroughHeaders.forEach((header) => {
const value = upstreamResponse.headers[header]
if (value) {
clientResponse.setHeader(header, value)
}
})
// Flush response headers immediately
if (typeof clientResponse.flushHeaders === 'function') {
clientResponse.flushHeaders()
}
// Parse SSE events to capture usage data
const parseSSEForUsage = (data) => {
const lines = data.split('\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const jsonStr = line.slice(6) // strip the 'data: ' prefix
if (jsonStr.trim() === '[DONE]') {
continue
}
const eventData = JSON.parse(jsonStr)
// Capture the model name
if (eventData.model) {
actualModel = eventData.model
}
// Capture usage statistics (Responses API: response.completed -> response.usage)
if (eventData.type === 'response.completed' && eventData.response) {
if (eventData.response.model) {
actualModel = eventData.response.model
}
if (eventData.response.usage) {
usageData = eventData.response.usage
logger.debug('Captured Azure OpenAI nested usage (response.usage):', usageData)
}
}
// Also support the Chat Completions style (top-level usage)
if (!usageData && eventData.usage) {
usageData = eventData.usage
logger.debug('Captured Azure OpenAI usage (top-level):', usageData)
}
// Check whether this is the completion event
if (eventData.choices && eventData.choices[0] && eventData.choices[0].finish_reason) {
// this is the final chunk
}
} catch (e) {
// Ignore parse errors
}
}
}
}
// Register stream cleanup
const cleanup = () => {
if (!hasEnded) {
hasEnded = true
try {
upstreamResponse.data?.removeAllListeners?.()
upstreamResponse.data?.destroy?.()
if (!clientResponse.headersSent) {
clientResponse.status(502).end()
} else if (!clientResponse.destroyed) {
clientResponse.end()
}
} catch (error) {
logger.warn('Stream cleanup error:', error.message)
}
}
}
streamManager.registerStream(streamId, cleanup)
upstreamResponse.data.on('data', (chunk) => {
try {
if (hasEnded || clientResponse.destroyed) {
return
}
eventCount++
if (eventCount > maxEvents) {
logger.warn(`Stream ${streamId} exceeded max events limit`)
cleanup()
return
}
const chunkStr = chunk.toString()
// Forward the data to the client
if (!clientResponse.destroyed) {
clientResponse.write(chunk)
}
// Also parse the data to capture usage info, with a buffer size limit
buffer += chunkStr
// Prevent the buffer from growing too large
if (buffer.length > MAX_BUFFER_SIZE) {
logger.warn(`Stream ${streamId} buffer exceeded limit, truncating`)
buffer = buffer.slice(-MAX_BUFFER_SIZE / 2) // keep the latter half
}
// Process complete SSE events
if (buffer.includes('\n\n')) {
const events = buffer.split('\n\n')
buffer = events.pop() || '' // keep the last, possibly incomplete, event
for (const event of events) {
if (event.trim() && event.length <= MAX_EVENT_SIZE) {
parseSSEForUsage(event)
}
}
}
if (onData) {
onData(chunk, { usageData, actualModel })
}
} catch (error) {
logger.error('Error processing Azure OpenAI stream chunk:', error)
if (!hasEnded) {
cleanup()
reject(error)
}
}
})
upstreamResponse.data.on('end', () => {
if (hasEnded) {
return
}
streamManager.cleanup(streamId)
hasEnded = true
try {
// Process the remaining buffer
if (buffer.trim() && buffer.length <= MAX_EVENT_SIZE) {
parseSSEForUsage(buffer)
}
if (onEnd) {
onEnd({ usageData, actualModel })
}
if (!clientResponse.destroyed) {
clientResponse.end()
}
resolve({ usageData, actualModel })
} catch (error) {
logger.error('Stream end handling error:', error)
reject(error)
}
})
upstreamResponse.data.on('error', (error) => {
if (hasEnded) {
return
}
streamManager.cleanup(streamId)
hasEnded = true
logger.error('Upstream stream error:', error)
try {
if (onError) {
onError(error)
}
if (!clientResponse.headersSent) {
clientResponse.status(502).json({ error: { message: 'Upstream stream error' } })
} else if (!clientResponse.destroyed) {
clientResponse.end()
}
} catch (cleanupError) {
logger.warn('Error during stream error cleanup:', cleanupError.message)
}
reject(error)
})
// Clean up when the client disconnects
const clientCleanup = () => {
streamManager.cleanup(streamId)
}
clientResponse.on('close', clientCleanup)
clientResponse.on('aborted', clientCleanup)
clientResponse.on('error', clientCleanup)
})
}
// Handle a non-streaming response
function handleNonStreamResponse(upstreamResponse, clientResponse) {
try {
// Set the status code
clientResponse.status(upstreamResponse.status)
// Set response headers
clientResponse.setHeader('Content-Type', 'application/json')
// Pass through selected headers
const passThroughHeaders = [
'x-request-id',
'x-ratelimit-remaining-requests',
'x-ratelimit-remaining-tokens'
]
passThroughHeaders.forEach((header) => {
const value = upstreamResponse.headers[header]
if (value) {
clientResponse.setHeader(header, value)
}
})
// Return the response data
const responseData = upstreamResponse.data
clientResponse.json(responseData)
// Extract usage data
const usageData = responseData.usage
const actualModel = responseData.model
return { usageData, actualModel, responseData }
} catch (error) {
logger.error('Error handling Azure OpenAI non-stream response:', error)
throw error
}
}
module.exports = {
handleAzureOpenAIRequest,
handleStreamResponse,
handleNonStreamResponse,
normalizeModelName
}
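
To show how the two relay helpers fit together, here is a rough sketch of an Express route that proxies chat completions through a selected Azure OpenAI account. The route path, the session ID source, and the file names are assumptions; only the exported relay functions and the account service API come from this commit.

// Hypothetical wiring sketch - paths, route, and req.sessionId are assumed for illustration.
const express = require('express')
const azureOpenaiAccountService = require('./azureOpenaiAccountService') // assumed path
const {
  handleAzureOpenAIRequest,
  handleStreamResponse,
  handleNonStreamResponse
} = require('./azureOpenaiRelayService') // assumed filename

const router = express.Router()

router.post('/v1/chat/completions', async (req, res) => {
  const isStream = req.body.stream === true
  // Pick (or reuse) an account; sessionId propagation is an assumption here
  const account = await azureOpenaiAccountService.selectAvailableAccount(req.sessionId)

  const upstream = await handleAzureOpenAIRequest({
    account,
    requestBody: req.body,
    isStream,
    endpoint: 'chat/completions'
  })

  if (isStream) {
    // Streaming: forward SSE chunks and collect usage once the stream ends
    const { usageData } = await handleStreamResponse(upstream, res)
    if (usageData && usageData.total_tokens) {
      await azureOpenaiAccountService.updateAccountUsage(account.id, usageData.total_tokens)
    }
  } else {
    const { usageData } = handleNonStreamResponse(upstream, res)
    if (usageData && usageData.total_tokens) {
      await azureOpenaiAccountService.updateAccountUsage(account.id, usageData.total_tokens)
    }
  }
})

module.exports = router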