feat: add support for Azure OpenAI accounts

shaw
2025-08-23 10:30:29 +08:00
parent e4e72ba5e9
commit 74bcb99142
11 changed files with 1884 additions and 7 deletions

View File

@@ -21,6 +21,7 @@ const geminiRoutes = require('./routes/geminiRoutes')
const openaiGeminiRoutes = require('./routes/openaiGeminiRoutes')
const openaiClaudeRoutes = require('./routes/openaiClaudeRoutes')
const openaiRoutes = require('./routes/openaiRoutes')
const azureOpenaiRoutes = require('./routes/azureOpenaiRoutes')
const webhookRoutes = require('./routes/webhook')
// Import middleware
@@ -241,6 +242,7 @@ class Application {
this.app.use('/openai/gemini', openaiGeminiRoutes)
this.app.use('/openai/claude', openaiClaudeRoutes)
this.app.use('/openai', openaiRoutes)
this.app.use('/azure', azureOpenaiRoutes)
this.app.use('/admin/webhook', webhookRoutes)
// 🏠 Redirect the root path to the new admin UI
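Note: with this mount, every route defined in routes/azureOpenaiRoutes.js becomes reachable under the /azure prefix (/azure/health, /azure/models, /azure/chat/completions, /azure/embeddings, /azure/usage). Below is a minimal smoke-test sketch; the base URL http://localhost:3000 and Bearer-token handling in authenticateApiKey are assumptions, not shown in this commit.

// Minimal sketch: list the relayed Azure models through the new /azure prefix.
// Assumptions: server on http://localhost:3000, API key passed as a Bearer token.
const BASE_URL = 'http://localhost:3000'

async function listAzureModels(apiKey) {
  const res = await fetch(`${BASE_URL}/azure/models`, {
    headers: { Authorization: `Bearer ${apiKey}` }
  })
  if (!res.ok) {
    throw new Error(`Model listing failed: ${res.status}`)
  }
  return res.json() // { object: 'list', data: [{ id: 'azure/gpt-4', ... }, ...] }
}

listAzureModels(process.env.RELAY_API_KEY).then((body) =>
  console.log(body.data.map((m) => m.id))
)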

View File

@@ -5,6 +5,7 @@ const claudeConsoleAccountService = require('../services/claudeConsoleAccountSer
const bedrockAccountService = require('../services/bedrockAccountService')
const geminiAccountService = require('../services/geminiAccountService')
const openaiAccountService = require('../services/openaiAccountService')
const azureOpenaiAccountService = require('../services/azureOpenaiAccountService')
const accountGroupService = require('../services/accountGroupService')
const redis = require('../models/redis')
const { authenticateAdmin } = require('../middleware/auth')
@@ -5227,4 +5228,291 @@ router.put(
}
)
// 🌐 Azure OpenAI account management
// Get all Azure OpenAI accounts
router.get('/azure-openai-accounts', authenticateAdmin, async (req, res) => {
try {
const accounts = await azureOpenaiAccountService.getAllAccounts()
res.json({
success: true,
data: accounts
})
} catch (error) {
logger.error('Failed to fetch Azure OpenAI accounts:', error)
res.status(500).json({
success: false,
message: 'Failed to fetch accounts',
error: error.message
})
}
})
// Create an Azure OpenAI account
router.post('/azure-openai-accounts', authenticateAdmin, async (req, res) => {
try {
const {
name,
description,
accountType,
azureEndpoint,
apiVersion,
deploymentName,
apiKey,
supportedModels,
proxy,
groupId,
priority,
isActive,
schedulable
} = req.body
// Validate required fields
if (!name) {
return res.status(400).json({
success: false,
message: 'Account name is required'
})
}
if (!azureEndpoint) {
return res.status(400).json({
success: false,
message: 'Azure endpoint is required'
})
}
if (!apiKey) {
return res.status(400).json({
success: false,
message: 'API key is required'
})
}
if (!deploymentName) {
return res.status(400).json({
success: false,
message: 'Deployment name is required'
})
}
// Validate the Azure endpoint format
if (!azureEndpoint.match(/^https:\/\/[\w-]+\.openai\.azure\.com$/)) {
return res.status(400).json({
success: false,
message:
'Invalid Azure OpenAI endpoint format. Expected: https://your-resource.openai.azure.com'
})
}
// Test the connection
try {
const testUrl = `${azureEndpoint}/openai/deployments/${deploymentName}?api-version=${apiVersion || '2024-02-01'}`
await axios.get(testUrl, {
headers: {
'api-key': apiKey
},
timeout: 5000
})
} catch (testError) {
if (testError.response?.status === 404) {
logger.warn('Azure OpenAI deployment not found, but continuing with account creation')
} else if (testError.response?.status === 401) {
return res.status(400).json({
success: false,
message: 'Invalid API key or unauthorized access'
})
}
}
const account = await azureOpenaiAccountService.createAccount({
name,
description,
accountType: accountType || 'shared',
azureEndpoint,
apiVersion: apiVersion || '2024-02-01',
deploymentName,
apiKey,
supportedModels,
proxy,
groupId,
priority: priority || 50,
isActive: isActive !== false,
schedulable: schedulable !== false
})
res.json({
success: true,
data: account,
message: 'Azure OpenAI account created successfully'
})
} catch (error) {
logger.error('Failed to create Azure OpenAI account:', error)
res.status(500).json({
success: false,
message: 'Failed to create account',
error: error.message
})
}
})
// Update an Azure OpenAI account
router.put('/azure-openai-accounts/:id', authenticateAdmin, async (req, res) => {
try {
const { id } = req.params
const updates = req.body
const account = await azureOpenaiAccountService.updateAccount(id, updates)
res.json({
success: true,
data: account,
message: 'Azure OpenAI account updated successfully'
})
} catch (error) {
logger.error('Failed to update Azure OpenAI account:', error)
res.status(500).json({
success: false,
message: 'Failed to update account',
error: error.message
})
}
})
// Delete an Azure OpenAI account
router.delete('/azure-openai-accounts/:id', authenticateAdmin, async (req, res) => {
try {
const { id } = req.params
await azureOpenaiAccountService.deleteAccount(id)
res.json({
success: true,
message: 'Azure OpenAI account deleted successfully'
})
} catch (error) {
logger.error('Failed to delete Azure OpenAI account:', error)
res.status(500).json({
success: false,
message: 'Failed to delete account',
error: error.message
})
}
})
// Toggle Azure OpenAI account status
router.put('/azure-openai-accounts/:id/toggle', authenticateAdmin, async (req, res) => {
try {
const { id } = req.params
const account = await azureOpenaiAccountService.getAccount(id)
if (!account) {
return res.status(404).json({
success: false,
message: 'Account not found'
})
}
const newStatus = account.isActive === 'true' ? 'false' : 'true'
await azureOpenaiAccountService.updateAccount(id, { isActive: newStatus })
res.json({
success: true,
message: `Account ${newStatus === 'true' ? 'activated' : 'deactivated'} successfully`,
isActive: newStatus === 'true'
})
} catch (error) {
logger.error('Failed to toggle Azure OpenAI account status:', error)
res.status(500).json({
success: false,
message: 'Failed to toggle account status',
error: error.message
})
}
})
// Toggle Azure OpenAI account schedulable status
router.put(
'/azure-openai-accounts/:accountId/toggle-schedulable',
authenticateAdmin,
async (req, res) => {
try {
const { accountId } = req.params
const result = await azureOpenaiAccountService.toggleSchedulable(accountId)
return res.json({
success: true,
schedulable: result.schedulable,
message: result.schedulable ? 'Scheduling enabled' : 'Scheduling disabled'
})
} catch (error) {
logger.error('Failed to toggle Azure OpenAI account schedulable status:', error)
return res.status(500).json({
success: false,
message: 'Failed to toggle schedulable status',
error: error.message
})
}
}
)
// Health-check a single Azure OpenAI account
router.post('/azure-openai-accounts/:id/health-check', authenticateAdmin, async (req, res) => {
try {
const { id } = req.params
const healthResult = await azureOpenaiAccountService.healthCheckAccount(id)
res.json({
success: true,
data: healthResult
})
} catch (error) {
logger.error('Failed to perform health check:', error)
res.status(500).json({
success: false,
message: 'Failed to perform health check',
error: error.message
})
}
})
// Batch health check for all Azure OpenAI accounts
router.post('/azure-openai-accounts/health-check-all', authenticateAdmin, async (req, res) => {
try {
const healthResults = await azureOpenaiAccountService.performHealthChecks()
res.json({
success: true,
data: healthResults
})
} catch (error) {
logger.error('Failed to perform batch health check:', error)
res.status(500).json({
success: false,
message: 'Failed to perform batch health check',
error: error.message
})
}
})
// Migrate API keys to support Azure OpenAI
router.post('/migrate-api-keys-azure', authenticateAdmin, async (req, res) => {
try {
const migratedCount = await azureOpenaiAccountService.migrateApiKeysForAzureSupport()
res.json({
success: true,
message: `Successfully migrated ${migratedCount} API keys for Azure OpenAI support`
})
} catch (error) {
logger.error('Failed to migrate API keys:', error)
res.status(500).json({
success: false,
message: 'Failed to migrate API keys',
error: error.message
})
}
})
module.exports = router
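For reference, a hedged sketch of creating an account through the new admin route. The admin router's mount prefix is not shown in this diff (/admin is assumed here), and Bearer-token admin auth is likewise an assumption; only the field names and defaults come from the route above.

// Sketch: create an Azure OpenAI account. name, azureEndpoint, apiKey and deploymentName
// are required; azureEndpoint must match https://<resource>.openai.azure.com.
async function createAzureAccount(adminToken) {
  const payload = {
    name: 'azure-prod-1',
    azureEndpoint: 'https://my-resource.openai.azure.com',
    apiKey: process.env.AZURE_OPENAI_KEY, // stored encrypted by the account service
    deploymentName: 'gpt-4o',
    apiVersion: '2024-02-01', // optional; this is the default
    accountType: 'shared',
    priority: 50
  }
  const res = await fetch('http://localhost:3000/admin/azure-openai-accounts', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${adminToken}`
    },
    body: JSON.stringify(payload)
  })
  return res.json() // { success: true, data: { id, ... }, message: '...' }
}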

View File

@@ -0,0 +1,318 @@
const express = require('express')
const router = express.Router()
const logger = require('../utils/logger')
const { authenticateApiKey } = require('../middleware/auth')
const azureOpenaiAccountService = require('../services/azureOpenaiAccountService')
const azureOpenaiRelayService = require('../services/azureOpenaiRelayService')
const apiKeyService = require('../services/apiKeyService')
const crypto = require('crypto')
// Supported model list - based on actual Azure OpenAI models
const ALLOWED_MODELS = {
CHAT_MODELS: [
'gpt-4',
'gpt-4-turbo',
'gpt-4o',
'gpt-4o-mini',
'gpt-35-turbo',
'gpt-35-turbo-16k'
],
EMBEDDING_MODELS: ['text-embedding-ada-002', 'text-embedding-3-small', 'text-embedding-3-large']
}
const ALL_ALLOWED_MODELS = [...ALLOWED_MODELS.CHAT_MODELS, ...ALLOWED_MODELS.EMBEDDING_MODELS]
// Azure OpenAI stable API version
// const AZURE_API_VERSION = '2024-02-01' // currently unused, kept for future use
// Atomic usage reporter
class AtomicUsageReporter {
constructor() {
this.reportedUsage = new Set()
this.pendingReports = new Map()
}
async reportOnce(requestId, usageData, apiKeyId, modelToRecord, accountId) {
if (this.reportedUsage.has(requestId)) {
logger.debug(`Usage already reported for request: ${requestId}`)
return false
}
// Prevent concurrent duplicate reports
if (this.pendingReports.has(requestId)) {
return this.pendingReports.get(requestId)
}
const reportPromise = this._performReport(
requestId,
usageData,
apiKeyId,
modelToRecord,
accountId
)
this.pendingReports.set(requestId, reportPromise)
try {
const result = await reportPromise
this.reportedUsage.add(requestId)
return result
} finally {
this.pendingReports.delete(requestId)
// Clean up expired reported-request records
setTimeout(() => this.reportedUsage.delete(requestId), 60 * 1000) // clean up after 1 minute
}
}
async _performReport(requestId, usageData, apiKeyId, modelToRecord, accountId) {
try {
const inputTokens = usageData.prompt_tokens || usageData.input_tokens || 0
const outputTokens = usageData.completion_tokens || usageData.output_tokens || 0
const cacheCreateTokens =
usageData.prompt_tokens_details?.cache_creation_tokens ||
usageData.input_tokens_details?.cache_creation_tokens ||
0
const cacheReadTokens =
usageData.prompt_tokens_details?.cached_tokens ||
usageData.input_tokens_details?.cached_tokens ||
0
await apiKeyService.recordUsage(
apiKeyId,
inputTokens,
outputTokens,
cacheCreateTokens,
cacheReadTokens,
modelToRecord,
accountId
)
// Also update the Azure account's lastUsedAt and cumulative usage
try {
const totalTokens = inputTokens + outputTokens + cacheCreateTokens + cacheReadTokens
if (accountId) {
await azureOpenaiAccountService.updateAccountUsage(accountId, totalTokens)
}
} catch (acctErr) {
logger.warn(`Failed to update Azure account usage for ${accountId}: ${acctErr.message}`)
}
logger.info(
`📊 Azure OpenAI Usage recorded for ${requestId}: ` +
`model=${modelToRecord}, ` +
`input=${inputTokens}, output=${outputTokens}, ` +
`cache_create=${cacheCreateTokens}, cache_read=${cacheReadTokens}`
)
return true
} catch (error) {
logger.error('Failed to report Azure OpenAI usage:', error)
return false
}
}
}
const usageReporter = new AtomicUsageReporter()
// Health check
router.get('/health', (req, res) => {
res.status(200).json({
status: 'healthy',
service: 'azure-openai-relay',
timestamp: new Date().toISOString()
})
})
// List available models (OpenAI API compatible)
router.get('/models', authenticateApiKey, async (req, res) => {
try {
const models = ALL_ALLOWED_MODELS.map((model) => ({
id: `azure/${model}`,
object: 'model',
created: Date.now(),
owned_by: 'azure-openai'
}))
res.json({
object: 'list',
data: models
})
} catch (error) {
logger.error('Error fetching Azure OpenAI models:', error)
res.status(500).json({ error: { message: 'Failed to fetch models' } })
}
})
// Handle chat completion requests
router.post('/chat/completions', authenticateApiKey, async (req, res) => {
const requestId = `azure_req_${Date.now()}_${crypto.randomBytes(8).toString('hex')}`
const sessionId = req.sessionId || req.headers['x-session-id'] || null
logger.info(`🚀 Azure OpenAI Chat Request ${requestId}`, {
apiKeyId: req.apiKey?.id,
sessionId,
model: req.body.model,
stream: req.body.stream || false,
messages: req.body.messages?.length || 0
})
try {
// Get the bound Azure OpenAI account
let account = null
if (req.apiKey?.azureOpenaiAccountId) {
account = await azureOpenaiAccountService.getAccount(req.apiKey.azureOpenaiAccountId)
if (!account) {
logger.warn(`Bound Azure OpenAI account not found: ${req.apiKey.azureOpenaiAccountId}`)
}
}
// If no account is bound or the bound account is unavailable, pick an available one
if (!account || account.isActive !== 'true') {
account = await azureOpenaiAccountService.selectAvailableAccount(sessionId)
}
// Send the request to Azure OpenAI
const response = await azureOpenaiRelayService.handleAzureOpenAIRequest({
account,
requestBody: req.body,
headers: req.headers,
isStream: req.body.stream || false,
endpoint: 'chat/completions'
})
// Handle streaming responses
if (req.body.stream) {
await azureOpenaiRelayService.handleStreamResponse(response, res, {
onEnd: async ({ usageData, actualModel }) => {
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
await usageReporter.reportOnce(
requestId,
usageData,
req.apiKey.id,
modelToRecord,
account.id
)
}
},
onError: (error) => {
logger.error(`Stream error for request ${requestId}:`, error)
}
})
} else {
// Handle non-stream responses
const { usageData, actualModel } = azureOpenaiRelayService.handleNonStreamResponse(
response,
res
)
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
await usageReporter.reportOnce(
requestId,
usageData,
req.apiKey.id,
modelToRecord,
account.id
)
}
}
} catch (error) {
logger.error(`Azure OpenAI request failed ${requestId}:`, error)
if (!res.headersSent) {
const statusCode = error.response?.status || 500
const errorMessage =
error.response?.data?.error?.message || error.message || 'Internal server error'
res.status(statusCode).json({
error: {
message: errorMessage,
type: 'azure_openai_error',
code: error.code || 'unknown'
}
})
}
}
})
// Handle embedding requests
router.post('/embeddings', authenticateApiKey, async (req, res) => {
const requestId = `azure_embed_${Date.now()}_${crypto.randomBytes(8).toString('hex')}`
const sessionId = req.sessionId || req.headers['x-session-id'] || null
logger.info(`🚀 Azure OpenAI Embeddings Request ${requestId}`, {
apiKeyId: req.apiKey?.id,
sessionId,
model: req.body.model,
input: Array.isArray(req.body.input) ? req.body.input.length : 1
})
try {
// Get the bound Azure OpenAI account
let account = null
if (req.apiKey?.azureOpenaiAccountId) {
account = await azureOpenaiAccountService.getAccount(req.apiKey.azureOpenaiAccountId)
if (!account) {
logger.warn(`Bound Azure OpenAI account not found: ${req.apiKey.azureOpenaiAccountId}`)
}
}
// If no account is bound or the bound account is unavailable, pick an available one
if (!account || account.isActive !== 'true') {
account = await azureOpenaiAccountService.selectAvailableAccount(sessionId)
}
// Send the request to Azure OpenAI
const response = await azureOpenaiRelayService.handleAzureOpenAIRequest({
account,
requestBody: req.body,
headers: req.headers,
isStream: false,
endpoint: 'embeddings'
})
// Handle the response
const { usageData, actualModel } = azureOpenaiRelayService.handleNonStreamResponse(
response,
res
)
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
await usageReporter.reportOnce(requestId, usageData, req.apiKey.id, modelToRecord, account.id)
}
} catch (error) {
logger.error(`Azure OpenAI embeddings request failed ${requestId}:`, error)
if (!res.headersSent) {
const statusCode = error.response?.status || 500
const errorMessage =
error.response?.data?.error?.message || error.message || 'Internal server error'
res.status(statusCode).json({
error: {
message: errorMessage,
type: 'azure_openai_error',
code: error.code || 'unknown'
}
})
}
}
})
// Get usage statistics
router.get('/usage', authenticateApiKey, async (req, res) => {
try {
const { start_date, end_date } = req.query
const usage = await apiKeyService.getUsageStats(req.apiKey.id, start_date, end_date)
res.json({
object: 'usage',
data: usage
})
} catch (error) {
logger.error('Error fetching Azure OpenAI usage:', error)
res.status(500).json({ error: { message: 'Failed to fetch usage data' } })
}
})
module.exports = router
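A hedged usage sketch for the relay itself: model ids carry the azure/ prefix (as returned by GET /azure/models) and are stripped by normalizeModelName before the request reaches Azure, while usage is captured from the SSE stream by AtomicUsageReporter. The base URL and auth header below are assumptions, not part of this commit.

// Sketch: streaming chat completion through POST /azure/chat/completions.
async function streamAzureChat(apiKey) {
  const res = await fetch('http://localhost:3000/azure/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`
    },
    body: JSON.stringify({
      model: 'azure/gpt-4o', // the relay strips the azure/ prefix
      stream: true,
      messages: [{ role: 'user', content: 'Say hello' }]
    })
  })
  // The relay forwards Azure's SSE stream verbatim; read it chunk by chunk.
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  for (;;) {
    const { done, value } = await reader.read()
    if (done) break
    process.stdout.write(decoder.decode(value, { stream: true }))
  }
}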

View File

@@ -20,6 +20,7 @@ class ApiKeyService {
claudeConsoleAccountId = null,
geminiAccountId = null,
openaiAccountId = null,
azureOpenaiAccountId = null,
bedrockAccountId = null, // Bedrock account ID support
permissions = 'all', // 'claude', 'gemini', 'openai', 'all'
isActive = true,
@@ -53,6 +54,7 @@ class ApiKeyService {
claudeConsoleAccountId: claudeConsoleAccountId || '',
geminiAccountId: geminiAccountId || '',
openaiAccountId: openaiAccountId || '',
azureOpenaiAccountId: azureOpenaiAccountId || '',
bedrockAccountId: bedrockAccountId || '', // Bedrock account ID
permissions: permissions || 'all',
enableModelRestriction: String(enableModelRestriction),
@@ -86,6 +88,7 @@ class ApiKeyService {
claudeConsoleAccountId: keyData.claudeConsoleAccountId,
geminiAccountId: keyData.geminiAccountId,
openaiAccountId: keyData.openaiAccountId,
azureOpenaiAccountId: keyData.azureOpenaiAccountId,
bedrockAccountId: keyData.bedrockAccountId, // Bedrock account ID
permissions: keyData.permissions,
enableModelRestriction: keyData.enableModelRestriction === 'true',
@@ -174,6 +177,7 @@ class ApiKeyService {
claudeConsoleAccountId: keyData.claudeConsoleAccountId,
geminiAccountId: keyData.geminiAccountId,
openaiAccountId: keyData.openaiAccountId,
azureOpenaiAccountId: keyData.azureOpenaiAccountId,
bedrockAccountId: keyData.bedrockAccountId, // Bedrock account ID
permissions: keyData.permissions || 'all',
tokenLimit: parseInt(keyData.tokenLimit),
@@ -308,6 +312,7 @@ class ApiKeyService {
'claudeConsoleAccountId',
'geminiAccountId',
'openaiAccountId',
'azureOpenaiAccountId',
'bedrockAccountId', // Bedrock account ID
'permissions',
'expiresAt',

View File

@@ -0,0 +1,479 @@
const redisClient = require('../models/redis')
const { v4: uuidv4 } = require('uuid')
const crypto = require('crypto')
const config = require('../../config/config')
const logger = require('../utils/logger')
// Encryption constants
const ALGORITHM = 'aes-256-cbc'
const IV_LENGTH = 16
// 🚀 Secure encryption key generation (supports a configurable salt)
const ENCRYPTION_SALT = config.security?.azureOpenaiSalt || 'azure-openai-account-default-salt'
class EncryptionKeyManager {
constructor() {
this.keyCache = new Map()
this.keyRotationInterval = 24 * 60 * 60 * 1000 // 24 hours
}
getKey(version = 'current') {
const cached = this.keyCache.get(version)
if (cached && Date.now() - cached.timestamp < this.keyRotationInterval) {
return cached.key
}
// Generate a new key
const key = crypto.scryptSync(config.security.encryptionKey, ENCRYPTION_SALT, 32)
this.keyCache.set(version, {
key,
timestamp: Date.now()
})
logger.debug('🔑 Azure OpenAI encryption key generated/refreshed')
return key
}
// Clean up expired keys
cleanup() {
const now = Date.now()
for (const [version, cached] of this.keyCache.entries()) {
if (now - cached.timestamp > this.keyRotationInterval) {
this.keyCache.delete(version)
}
}
}
}
const encryptionKeyManager = new EncryptionKeyManager()
// Periodically clean up expired keys
setInterval(
() => {
encryptionKeyManager.cleanup()
},
60 * 60 * 1000
) // run once per hour
// Generate the encryption key via the secure key manager
function generateEncryptionKey() {
return encryptionKeyManager.getKey()
}
// Azure OpenAI account key prefixes
const AZURE_OPENAI_ACCOUNT_KEY_PREFIX = 'azure_openai:account:'
const SHARED_AZURE_OPENAI_ACCOUNTS_KEY = 'shared_azure_openai_accounts'
const ACCOUNT_SESSION_MAPPING_PREFIX = 'azure_openai_session_account_mapping:'
// Encryption helper
function encrypt(text) {
if (!text) {
return ''
}
const key = generateEncryptionKey()
const iv = crypto.randomBytes(IV_LENGTH)
const cipher = crypto.createCipheriv(ALGORITHM, key, iv)
let encrypted = cipher.update(text)
encrypted = Buffer.concat([encrypted, cipher.final()])
return `${iv.toString('hex')}:${encrypted.toString('hex')}`
}
// Decryption helper - no result caching, for better security
function decrypt(text) {
if (!text) {
return ''
}
try {
const key = generateEncryptionKey()
// The IV is a fixed 32 hex characters (16 bytes)
const ivHex = text.substring(0, 32)
const encryptedHex = text.substring(33) // skip the colon
if (ivHex.length !== 32 || !encryptedHex) {
throw new Error('Invalid encrypted text format')
}
const iv = Buffer.from(ivHex, 'hex')
const encryptedText = Buffer.from(encryptedHex, 'hex')
const decipher = crypto.createDecipheriv(ALGORITHM, key, iv)
let decrypted = decipher.update(encryptedText)
decrypted = Buffer.concat([decrypted, decipher.final()])
const result = decrypted.toString()
return result
} catch (error) {
logger.error('Azure OpenAI decryption error:', error.message)
return ''
}
}
// Create an account
async function createAccount(accountData) {
const accountId = uuidv4()
const now = new Date().toISOString()
const account = {
id: accountId,
name: accountData.name,
description: accountData.description || '',
accountType: accountData.accountType || 'shared',
groupId: accountData.groupId || null,
priority: accountData.priority || 50,
// Azure OpenAI-specific fields
azureEndpoint: accountData.azureEndpoint || '',
apiVersion: accountData.apiVersion || '2024-02-01', // use the stable version
deploymentName: accountData.deploymentName || 'gpt-4', // use the default deployment name
apiKey: encrypt(accountData.apiKey || ''),
// Supported models
supportedModels: JSON.stringify(
accountData.supportedModels || ['gpt-4', 'gpt-4-turbo', 'gpt-35-turbo', 'gpt-35-turbo-16k']
),
// Status fields
isActive: accountData.isActive !== false ? 'true' : 'false',
status: 'active',
schedulable: accountData.schedulable !== false ? 'true' : 'false',
createdAt: now,
updatedAt: now
}
// Proxy configuration
if (accountData.proxy) {
account.proxy =
typeof accountData.proxy === 'string' ? accountData.proxy : JSON.stringify(accountData.proxy)
}
const client = redisClient.getClientSafe()
await client.hset(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, account)
// If it is a shared account, add it to the shared accounts set
if (account.accountType === 'shared') {
await client.sadd(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
}
logger.info(`Created Azure OpenAI account: ${accountId}`)
return account
}
// Get an account
async function getAccount(accountId) {
const client = redisClient.getClientSafe()
const accountData = await client.hgetall(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`)
if (!accountData || Object.keys(accountData).length === 0) {
return null
}
// Decrypt sensitive data (internal use only; not returned to the frontend)
if (accountData.apiKey) {
accountData.apiKey = decrypt(accountData.apiKey)
}
// Parse proxy configuration
if (accountData.proxy && typeof accountData.proxy === 'string') {
try {
accountData.proxy = JSON.parse(accountData.proxy)
} catch (e) {
accountData.proxy = null
}
}
// Parse supported models
if (accountData.supportedModels && typeof accountData.supportedModels === 'string') {
try {
accountData.supportedModels = JSON.parse(accountData.supportedModels)
} catch (e) {
accountData.supportedModels = ['gpt-4', 'gpt-35-turbo']
}
}
return accountData
}
// Update an account
async function updateAccount(accountId, updates) {
const existingAccount = await getAccount(accountId)
if (!existingAccount) {
throw new Error('Account not found')
}
updates.updatedAt = new Date().toISOString()
// Encrypt sensitive data
if (updates.apiKey) {
updates.apiKey = encrypt(updates.apiKey)
}
// Handle proxy configuration
if (updates.proxy) {
updates.proxy =
typeof updates.proxy === 'string' ? updates.proxy : JSON.stringify(updates.proxy)
}
// Handle supported models
if (updates.supportedModels) {
updates.supportedModels =
typeof updates.supportedModels === 'string'
? updates.supportedModels
: JSON.stringify(updates.supportedModels)
}
// Keep the shared accounts set in sync when the account type changes
const client = redisClient.getClientSafe()
if (updates.accountType && updates.accountType !== existingAccount.accountType) {
if (updates.accountType === 'shared') {
await client.sadd(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
} else {
await client.srem(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
}
}
await client.hset(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, updates)
logger.info(`Updated Azure OpenAI account: ${accountId}`)
// Merge the updated account data
const updatedAccount = { ...existingAccount, ...updates }
// Parse proxy configuration before returning
if (updatedAccount.proxy && typeof updatedAccount.proxy === 'string') {
try {
updatedAccount.proxy = JSON.parse(updatedAccount.proxy)
} catch (e) {
updatedAccount.proxy = null
}
}
return updatedAccount
}
// Delete an account
async function deleteAccount(accountId) {
const client = redisClient.getClientSafe()
const accountKey = `${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`
// Remove account data from Redis
await client.del(accountKey)
// Remove from the shared accounts set
await client.srem(SHARED_AZURE_OPENAI_ACCOUNTS_KEY, accountId)
logger.info(`Deleted Azure OpenAI account: ${accountId}`)
return true
}
// Get all accounts
async function getAllAccounts() {
const client = redisClient.getClientSafe()
const keys = await client.keys(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}*`)
if (!keys || keys.length === 0) {
return []
}
const accounts = []
for (const key of keys) {
const accountData = await client.hgetall(key)
if (accountData && Object.keys(accountData).length > 0) {
// Do not return sensitive data to the frontend
delete accountData.apiKey
// Parse proxy configuration
if (accountData.proxy && typeof accountData.proxy === 'string') {
try {
accountData.proxy = JSON.parse(accountData.proxy)
} catch (e) {
accountData.proxy = null
}
}
// Parse supported models
if (accountData.supportedModels && typeof accountData.supportedModels === 'string') {
try {
accountData.supportedModels = JSON.parse(accountData.supportedModels)
} catch (e) {
accountData.supportedModels = ['gpt-4', 'gpt-35-turbo']
}
}
accounts.push(accountData)
}
}
return accounts
}
// Get shared accounts
async function getSharedAccounts() {
const client = redisClient.getClientSafe()
const accountIds = await client.smembers(SHARED_AZURE_OPENAI_ACCOUNTS_KEY)
if (!accountIds || accountIds.length === 0) {
return []
}
const accounts = []
for (const accountId of accountIds) {
const account = await getAccount(accountId)
if (account && account.isActive === 'true') {
accounts.push(account)
}
}
return accounts
}
// Select an available account
async function selectAvailableAccount(sessionId = null) {
// If a session ID is given, try to reuse the previously assigned account
if (sessionId) {
const client = redisClient.getClientSafe()
const mappingKey = `${ACCOUNT_SESSION_MAPPING_PREFIX}${sessionId}`
const accountId = await client.get(mappingKey)
if (accountId) {
const account = await getAccount(accountId)
if (account && account.isActive === 'true' && account.schedulable === 'true') {
logger.debug(`Reusing Azure OpenAI account ${accountId} for session ${sessionId}`)
return account
}
}
}
// Get all shared accounts
const sharedAccounts = await getSharedAccounts()
// Filter for usable accounts
const availableAccounts = sharedAccounts.filter(
(acc) => acc.isActive === 'true' && acc.schedulable === 'true'
)
if (availableAccounts.length === 0) {
throw new Error('No available Azure OpenAI accounts')
}
// Sort by priority and pick the top account
availableAccounts.sort((a, b) => (b.priority || 50) - (a.priority || 50))
const selectedAccount = availableAccounts[0]
// If a session ID is given, persist the mapping
if (sessionId && selectedAccount) {
const client = redisClient.getClientSafe()
const mappingKey = `${ACCOUNT_SESSION_MAPPING_PREFIX}${sessionId}`
await client.setex(mappingKey, 3600, selectedAccount.id) // expires after 1 hour
}
logger.debug(`Selected Azure OpenAI account: ${selectedAccount.id}`)
return selectedAccount
}
// Update account usage
async function updateAccountUsage(accountId, tokens) {
const client = redisClient.getClientSafe()
const now = new Date().toISOString()
// Update usage atomically with HINCRBY
await client.hincrby(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, 'totalTokensUsed', tokens)
await client.hset(`${AZURE_OPENAI_ACCOUNT_KEY_PREFIX}${accountId}`, 'lastUsedAt', now)
logger.debug(`Updated Azure OpenAI account ${accountId} usage: ${tokens} tokens`)
}
// Health-check a single account
async function healthCheckAccount(accountId) {
try {
const account = await getAccount(accountId)
if (!account) {
return { id: accountId, status: 'error', message: 'Account not found' }
}
// Basic check that the configuration is complete
if (!account.azureEndpoint || !account.apiKey || !account.deploymentName) {
return {
id: accountId,
status: 'error',
message: 'Incomplete configuration'
}
}
// An actual API call test could be added here
// For now, return a healthy status
return {
id: accountId,
status: 'healthy',
message: 'Account is configured correctly'
}
} catch (error) {
logger.error(`Health check failed for Azure OpenAI account ${accountId}:`, error)
return {
id: accountId,
status: 'error',
message: error.message
}
}
}
// Batch health check
async function performHealthChecks() {
const accounts = await getAllAccounts()
const results = []
for (const account of accounts) {
const result = await healthCheckAccount(account.id)
results.push(result)
}
return results
}
// Toggle an account's schedulable status
async function toggleSchedulable(accountId) {
const account = await getAccount(accountId)
if (!account) {
throw new Error('Account not found')
}
const newSchedulable = account.schedulable === 'true' ? 'false' : 'true'
await updateAccount(accountId, { schedulable: newSchedulable })
return {
id: accountId,
schedulable: newSchedulable === 'true'
}
}
// Migrate API keys to support Azure OpenAI
async function migrateApiKeysForAzureSupport() {
const client = redisClient.getClientSafe()
const apiKeyIds = await client.smembers('api_keys')
let migratedCount = 0
for (const keyId of apiKeyIds) {
const keyData = await client.hgetall(`api_key:${keyId}`)
if (keyData && !keyData.azureOpenaiAccountId) {
// Add the Azure OpenAI account ID field (initially empty)
await client.hset(`api_key:${keyId}`, 'azureOpenaiAccountId', '')
migratedCount++
}
}
logger.info(`Migrated ${migratedCount} API keys for Azure OpenAI support`)
return migratedCount
}
module.exports = {
createAccount,
getAccount,
updateAccount,
deleteAccount,
getAllAccounts,
getSharedAccounts,
selectAvailableAccount,
updateAccountUsage,
healthCheckAccount,
performHealthChecks,
toggleSchedulable,
migrateApiKeysForAzureSupport,
encrypt,
decrypt
}
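A note on the storage format: encrypt() writes the apiKey field of the account hash as '<iv hex>:<ciphertext hex>' (AES-256-CBC, 16-byte IV), and decrypt() splits at the fixed 32-hex-character IV. The standalone sketch below mirrors that scheme with a locally derived key purely for illustration; the service itself derives its key from config.security.encryptionKey and the azureOpenaiSalt.

const crypto = require('crypto')

// Illustrative key derivation only; the real key comes from config.security.encryptionKey.
const key = crypto.scryptSync('example-secret', 'azure-openai-account-default-salt', 32)
const iv = crypto.randomBytes(16)

const cipher = crypto.createCipheriv('aes-256-cbc', key, iv)
const stored = `${iv.toString('hex')}:${Buffer.concat([
  cipher.update('my-azure-api-key'),
  cipher.final()
]).toString('hex')}`

// Decryption splits at the 32-hex-char IV and skips the colon, exactly as decrypt() does.
const decipher = crypto.createDecipheriv('aes-256-cbc', key, Buffer.from(stored.slice(0, 32), 'hex'))
const plain = Buffer.concat([
  decipher.update(Buffer.from(stored.slice(33), 'hex')),
  decipher.final()
]).toString()
console.log(plain) // 'my-azure-api-key'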

View File

@@ -0,0 +1,529 @@
const axios = require('axios')
const ProxyHelper = require('../utils/proxyHelper')
const logger = require('../utils/logger')
// Normalize the model name (strip the azure/ prefix)
function normalizeModelName(model) {
if (model && model.startsWith('azure/')) {
return model.replace('azure/', '')
}
return model
}
// Handle an Azure OpenAI request
async function handleAzureOpenAIRequest({
account,
requestBody,
headers: _headers = {}, // leading underscore marks this as intentionally unused
isStream = false,
endpoint = 'chat/completions'
}) {
// Declare these at the top of the function so they remain accessible in the catch block
let requestUrl = ''
let proxyAgent = null
let deploymentName = ''
try {
// Build the Azure OpenAI request URL
const baseUrl = account.azureEndpoint
deploymentName = account.deploymentName || 'default'
// Azure Responses API requires preview versions; fall back appropriately
const apiVersion =
account.apiVersion || (endpoint === 'responses' ? '2024-10-01-preview' : '2024-02-01')
if (endpoint === 'chat/completions') {
requestUrl = `${baseUrl}/openai/deployments/${deploymentName}/chat/completions?api-version=${apiVersion}`
} else if (endpoint === 'responses') {
requestUrl = `${baseUrl}/openai/responses?api-version=${apiVersion}`
} else {
requestUrl = `${baseUrl}/openai/deployments/${deploymentName}/${endpoint}?api-version=${apiVersion}`
}
// Prepare request headers
const requestHeaders = {
'Content-Type': 'application/json',
'api-key': account.apiKey
}
// Remove headers that should not be forwarded
delete requestHeaders['anthropic-version']
delete requestHeaders['x-api-key']
delete requestHeaders['host']
// Process the request body
const processedBody = { ...requestBody }
// Normalize the model name
if (processedBody.model) {
processedBody.model = normalizeModelName(processedBody.model)
} else {
processedBody.model = 'gpt-4'
}
// Use the shared proxy helper
proxyAgent = ProxyHelper.createProxyAgent(account.proxy)
// Configure request options
const axiosConfig = {
method: 'POST',
url: requestUrl,
headers: requestHeaders,
data: processedBody,
timeout: 600000, // 10 minutes for Azure OpenAI
validateStatus: () => true,
// Connection keep-alive options
keepAlive: true,
maxRedirects: 5,
// Guard against socket hang up
socketKeepAlive: true
}
// If a proxy is configured, attach it
if (proxyAgent) {
axiosConfig.httpsAgent = proxyAgent
// Extra keep-alive settings for the proxy agent
if (proxyAgent.options) {
proxyAgent.options.keepAlive = true
proxyAgent.options.keepAliveMsecs = 1000
}
logger.debug(
`Using proxy for Azure OpenAI request: ${ProxyHelper.getProxyDescription(account.proxy)}`
)
}
// Special handling for streaming requests
if (isStream) {
axiosConfig.responseType = 'stream'
requestHeaders.accept = 'text/event-stream'
} else {
requestHeaders.accept = 'application/json'
}
logger.debug(`Making Azure OpenAI request`, {
requestUrl,
method: 'POST',
endpoint,
deploymentName,
apiVersion,
hasProxy: !!proxyAgent,
proxyInfo: ProxyHelper.maskProxyInfo(account.proxy),
isStream,
requestBodySize: JSON.stringify(processedBody).length
})
logger.debug('Azure OpenAI request headers', {
'content-type': requestHeaders['Content-Type'],
'user-agent': requestHeaders['user-agent'] || 'not-set',
customHeaders: Object.keys(requestHeaders).filter(
(key) => !['Content-Type', 'user-agent'].includes(key)
)
})
logger.debug('Azure OpenAI request body', {
model: processedBody.model,
messages: processedBody.messages?.length || 0,
otherParams: Object.keys(processedBody).filter((key) => !['model', 'messages'].includes(key))
})
const requestStartTime = Date.now()
logger.debug(`🔄 Starting Azure OpenAI HTTP request at ${new Date().toISOString()}`)
// Send the request
const response = await axios(axiosConfig)
const requestDuration = Date.now() - requestStartTime
logger.debug(`✅ Azure OpenAI HTTP request completed at ${new Date().toISOString()}`)
logger.debug(`Azure OpenAI response received`, {
status: response.status,
statusText: response.statusText,
duration: `${requestDuration}ms`,
responseHeaders: Object.keys(response.headers || {}),
hasData: !!response.data,
contentType: response.headers?.['content-type'] || 'unknown'
})
return response
} catch (error) {
const errorDetails = {
message: error.message,
code: error.code,
status: error.response?.status,
statusText: error.response?.statusText,
responseData: error.response?.data,
requestUrl: requestUrl || 'unknown',
endpoint,
deploymentName: deploymentName || account?.deploymentName || 'unknown',
hasProxy: !!proxyAgent,
proxyType: account?.proxy?.type || 'none',
isTimeout: error.code === 'ECONNABORTED',
isNetworkError: !error.response,
stack: error.stack
}
// Detailed logging for specific error types
if (error.code === 'ENOTFOUND') {
logger.error('DNS Resolution Failed for Azure OpenAI', {
...errorDetails,
hostname: requestUrl && requestUrl !== 'unknown' ? new URL(requestUrl).hostname : 'unknown',
suggestion: 'Check if Azure endpoint URL is correct and accessible'
})
} else if (error.code === 'ECONNREFUSED') {
logger.error('Connection Refused by Azure OpenAI', {
...errorDetails,
suggestion: 'Check if proxy settings are correct or Azure service is accessible'
})
} else if (error.code === 'ECONNRESET' || error.message.includes('socket hang up')) {
logger.error('🚨 Azure OpenAI Connection Reset / Socket Hang Up', {
...errorDetails,
suggestion:
'Connection was dropped by Azure OpenAI or proxy. This might be due to long request processing time, proxy timeout, or network instability. Try reducing request complexity or check proxy settings.'
})
} else if (error.code === 'ECONNABORTED' || error.code === 'ETIMEDOUT') {
logger.error('🚨 Azure OpenAI Request Timeout', {
...errorDetails,
timeoutMs: 600000,
suggestion:
'Request exceeded 10-minute timeout. Consider reducing model complexity or check if Azure service is responding slowly.'
})
} else if (
error.code === 'CERT_AUTHORITY_INVALID' ||
error.code === 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'
) {
logger.error('SSL Certificate Error for Azure OpenAI', {
...errorDetails,
suggestion: 'SSL certificate validation failed - check proxy SSL settings'
})
} else if (error.response?.status === 401) {
logger.error('Azure OpenAI Authentication Failed', {
...errorDetails,
suggestion: 'Check if Azure OpenAI API key is valid and not expired'
})
} else if (error.response?.status === 404) {
logger.error('Azure OpenAI Deployment Not Found', {
...errorDetails,
suggestion: 'Check if deployment name and Azure endpoint are correct'
})
} else {
logger.error('Azure OpenAI Request Failed', errorDetails)
}
throw error
}
}
// Safe stream manager
class StreamManager {
constructor() {
this.activeStreams = new Set()
this.cleanupCallbacks = new Map()
}
registerStream(streamId, cleanup) {
this.activeStreams.add(streamId)
this.cleanupCallbacks.set(streamId, cleanup)
}
cleanup(streamId) {
if (this.activeStreams.has(streamId)) {
try {
const cleanup = this.cleanupCallbacks.get(streamId)
if (cleanup) {
cleanup()
}
} catch (error) {
logger.warn(`Stream cleanup error for ${streamId}:`, error.message)
} finally {
this.activeStreams.delete(streamId)
this.cleanupCallbacks.delete(streamId)
}
}
}
getActiveStreamCount() {
return this.activeStreams.size
}
}
const streamManager = new StreamManager()
// SSE buffer size limits
const MAX_BUFFER_SIZE = 64 * 1024 // 64KB
const MAX_EVENT_SIZE = 16 * 1024 // 16KB maximum size of a single event
// Handle streaming responses
function handleStreamResponse(upstreamResponse, clientResponse, options = {}) {
const { onData, onEnd, onError } = options
const streamId = `stream_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
logger.info(`Starting Azure OpenAI stream handling`, {
streamId,
upstreamStatus: upstreamResponse.status,
upstreamHeaders: Object.keys(upstreamResponse.headers || {}),
clientRemoteAddress: clientResponse.req?.connection?.remoteAddress,
hasOnData: !!onData,
hasOnEnd: !!onEnd,
hasOnError: !!onError
})
return new Promise((resolve, reject) => {
let buffer = ''
let usageData = null
let actualModel = null
let hasEnded = false
let eventCount = 0
const maxEvents = 10000 // cap on the number of SSE events
// Set response headers
clientResponse.setHeader('Content-Type', 'text/event-stream')
clientResponse.setHeader('Cache-Control', 'no-cache')
clientResponse.setHeader('Connection', 'keep-alive')
clientResponse.setHeader('X-Accel-Buffering', 'no')
// Pass through selected headers
const passThroughHeaders = [
'x-request-id',
'x-ratelimit-remaining-requests',
'x-ratelimit-remaining-tokens'
]
passThroughHeaders.forEach((header) => {
const value = upstreamResponse.headers[header]
if (value) {
clientResponse.setHeader(header, value)
}
})
// Flush response headers immediately
if (typeof clientResponse.flushHeaders === 'function') {
clientResponse.flushHeaders()
}
// Parse SSE events to capture usage data
const parseSSEForUsage = (data) => {
const lines = data.split('\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const jsonStr = line.slice(6) // strip the 'data: ' prefix
if (jsonStr.trim() === '[DONE]') {
continue
}
const eventData = JSON.parse(jsonStr)
// Capture model information
if (eventData.model) {
actualModel = eventData.model
}
// Capture usage stats (Responses API: response.completed -> response.usage)
if (eventData.type === 'response.completed' && eventData.response) {
if (eventData.response.model) {
actualModel = eventData.response.model
}
if (eventData.response.usage) {
usageData = eventData.response.usage
logger.debug('Captured Azure OpenAI nested usage (response.usage):', usageData)
}
}
// Also handle Chat Completions style (top-level usage)
if (!usageData && eventData.usage) {
usageData = eventData.usage
logger.debug('Captured Azure OpenAI usage (top-level):', usageData)
}
// Check for the completion event
if (eventData.choices && eventData.choices[0] && eventData.choices[0].finish_reason) {
// this is the final chunk
}
} catch (e) {
// ignore parse errors
}
}
}
}
// Register stream cleanup
const cleanup = () => {
if (!hasEnded) {
hasEnded = true
try {
upstreamResponse.data?.removeAllListeners?.()
upstreamResponse.data?.destroy?.()
if (!clientResponse.headersSent) {
clientResponse.status(502).end()
} else if (!clientResponse.destroyed) {
clientResponse.end()
}
} catch (error) {
logger.warn('Stream cleanup error:', error.message)
}
}
}
streamManager.registerStream(streamId, cleanup)
upstreamResponse.data.on('data', (chunk) => {
try {
if (hasEnded || clientResponse.destroyed) {
return
}
eventCount++
if (eventCount > maxEvents) {
logger.warn(`Stream ${streamId} exceeded max events limit`)
cleanup()
return
}
const chunkStr = chunk.toString()
// Forward data to the client
if (!clientResponse.destroyed) {
clientResponse.write(chunk)
}
// Also parse the data to capture usage info, with a buffer size cap
buffer += chunkStr
// Prevent the buffer from growing too large
if (buffer.length > MAX_BUFFER_SIZE) {
logger.warn(`Stream ${streamId} buffer exceeded limit, truncating`)
buffer = buffer.slice(-MAX_BUFFER_SIZE / 2) // keep the latter half
}
// Process complete SSE events
if (buffer.includes('\n\n')) {
const events = buffer.split('\n\n')
buffer = events.pop() || '' // keep the last, possibly incomplete event
for (const event of events) {
if (event.trim() && event.length <= MAX_EVENT_SIZE) {
parseSSEForUsage(event)
}
}
}
if (onData) {
onData(chunk, { usageData, actualModel })
}
} catch (error) {
logger.error('Error processing Azure OpenAI stream chunk:', error)
if (!hasEnded) {
cleanup()
reject(error)
}
}
})
upstreamResponse.data.on('end', () => {
if (hasEnded) {
return
}
streamManager.cleanup(streamId)
hasEnded = true
try {
// Process any remaining buffer
if (buffer.trim() && buffer.length <= MAX_EVENT_SIZE) {
parseSSEForUsage(buffer)
}
if (onEnd) {
onEnd({ usageData, actualModel })
}
if (!clientResponse.destroyed) {
clientResponse.end()
}
resolve({ usageData, actualModel })
} catch (error) {
logger.error('Stream end handling error:', error)
reject(error)
}
})
upstreamResponse.data.on('error', (error) => {
if (hasEnded) {
return
}
streamManager.cleanup(streamId)
hasEnded = true
logger.error('Upstream stream error:', error)
try {
if (onError) {
onError(error)
}
if (!clientResponse.headersSent) {
clientResponse.status(502).json({ error: { message: 'Upstream stream error' } })
} else if (!clientResponse.destroyed) {
clientResponse.end()
}
} catch (cleanupError) {
logger.warn('Error during stream error cleanup:', cleanupError.message)
}
reject(error)
})
// Clean up when the client disconnects
const clientCleanup = () => {
streamManager.cleanup(streamId)
}
clientResponse.on('close', clientCleanup)
clientResponse.on('aborted', clientCleanup)
clientResponse.on('error', clientCleanup)
})
}
// Handle non-stream responses
function handleNonStreamResponse(upstreamResponse, clientResponse) {
try {
// Set the status code
clientResponse.status(upstreamResponse.status)
// Set response headers
clientResponse.setHeader('Content-Type', 'application/json')
// Pass through selected headers
const passThroughHeaders = [
'x-request-id',
'x-ratelimit-remaining-requests',
'x-ratelimit-remaining-tokens'
]
passThroughHeaders.forEach((header) => {
const value = upstreamResponse.headers[header]
if (value) {
clientResponse.setHeader(header, value)
}
})
// Return the response data
const responseData = upstreamResponse.data
clientResponse.json(responseData)
// Extract usage data
const usageData = responseData.usage
const actualModel = responseData.model
return { usageData, actualModel, responseData }
} catch (error) {
logger.error('Error handling Azure OpenAI non-stream response:', error)
throw error
}
}
module.exports = {
handleAzureOpenAIRequest,
handleStreamResponse,
handleNonStreamResponse,
normalizeModelName
}
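For debugging 404s against a deployment, here is a distilled view of the URL construction performed in handleAzureOpenAIRequest above, applied to a hypothetical account object; the fallback API versions match the code.

// Sketch of the request-URL logic: chat/completions and embeddings go through the
// deployment path, while the Responses API uses /openai/responses with a preview api-version.
function buildAzureUrl(account, endpoint) {
  const apiVersion =
    account.apiVersion || (endpoint === 'responses' ? '2024-10-01-preview' : '2024-02-01')
  if (endpoint === 'responses') {
    return `${account.azureEndpoint}/openai/responses?api-version=${apiVersion}`
  }
  return `${account.azureEndpoint}/openai/deployments/${account.deploymentName}/${endpoint}?api-version=${apiVersion}`
}

console.log(
  buildAzureUrl(
    { azureEndpoint: 'https://my-resource.openai.azure.com', deploymentName: 'gpt-4o' },
    'chat/completions'
  )
)
// => https://my-resource.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2024-02-01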