refactor: rework the Gemini conversion part

Author: shaw
Date: 2025-11-25 10:30:39 +08:00
Parent: fab2df0cf5
Commit: a4dcfb842e
5 changed files with 2479 additions and 2627 deletions

Diffs for 3 of the 5 changed files are suppressed because they are too large.

View File

@@ -2,10 +2,11 @@ const express = require('express')
 const { authenticateApiKey } = require('../middleware/auth')
 const logger = require('../utils/logger')
 const { handleChatCompletion } = require('./openaiClaudeRoutes')
+// Import the handler functions from handlers/geminiHandlers.js
 const {
   handleGenerateContent: geminiHandleGenerateContent,
   handleStreamGenerateContent: geminiHandleStreamGenerateContent
-} = require('./geminiRoutes')
+} = require('../handlers/geminiHandlers')
 const openaiRoutes = require('./openaiRoutes')
 const router = express.Router()
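For orientation, a sketch of how this routes file might consume the relocated handlers; only the import itself appears in this commit, so the endpoint paths and middleware order below are assumptions, not part of the diff.

// Hypothetical wiring around the new import; the route paths are illustrative only
const {
  handleGenerateContent: geminiHandleGenerateContent,
  handleStreamGenerateContent: geminiHandleStreamGenerateContent
} = require('../handlers/geminiHandlers')

// Assumed Gemini-style endpoints guarded by the existing API-key middleware
router.post('/gemini/generateContent', authenticateApiKey, geminiHandleGenerateContent)
router.post('/gemini/streamGenerateContent', authenticateApiKey, geminiHandleStreamGenerateContent)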
@@ -16,25 +17,6 @@ function detectBackendFromModel(modelName) {
     return 'claude' // default to Claude
   }
 
-  // First, try modelService to look up the model's provider
-  try {
-    const modelService = require('../services/modelService')
-    const provider = modelService.getModelProvider(modelName)
-    if (provider === 'anthropic') {
-      return 'claude'
-    }
-    if (provider === 'openai') {
-      return 'openai'
-    }
-    if (provider === 'google') {
-      return 'gemini'
-    }
-  } catch (error) {
-    logger.warn(`⚠️ Failed to detect backend from modelService: ${error.message}`)
-  }
-
-  // Fall back to prefix matching
   const model = modelName.toLowerCase()
 
   // Claude models
@@ -42,21 +24,16 @@ function detectBackendFromModel(modelName) {
     return 'claude'
   }
 
-  // OpenAI models
-  if (
-    model.startsWith('gpt-') ||
-    model.startsWith('o1-') ||
-    model.startsWith('o3-') ||
-    model === 'chatgpt-4o-latest'
-  ) {
-    return 'openai'
-  }
-
   // Gemini models
   if (model.startsWith('gemini-')) {
     return 'gemini'
   }
 
+  // OpenAI models
+  if (model.startsWith('gpt-')) {
+    return 'openai'
+  }
+
   // Default to Claude
   return 'claude'
 }
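Assembled from the new side of the two hunks above, the simplified detectBackendFromModel now relies on prefix matching alone. The leading guard and the exact Claude prefix check sit in context lines that are not fully shown, so they are reconstructed here as assumptions.

// Sketch of the function after this commit (prefix matching only)
function detectBackendFromModel(modelName) {
  if (!modelName) {
    return 'claude' // default to Claude (guard assumed from the context lines)
  }

  const model = modelName.toLowerCase()

  // Claude models (exact condition assumed; the hunk only shows its return)
  if (model.startsWith('claude-')) {
    return 'claude'
  }

  // Gemini models
  if (model.startsWith('gemini-')) {
    return 'gemini'
  }

  // OpenAI models
  if (model.startsWith('gpt-')) {
    return 'openai'
  }

  // Default to Claude
  return 'claude'
}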

View File

@@ -1,5 +1,3 @@
-const fs = require('fs')
-const path = require('path')
 const logger = require('../utils/logger')
 
 /**
@@ -9,54 +7,22 @@ const logger = require('../utils/logger')
  */
 class ModelService {
   constructor() {
-    this.modelsFile = path.join(process.cwd(), 'data', 'supported_models.json')
-    this.supportedModels = null
-    this.fileWatcher = null
+    this.supportedModels = this.getDefaultModels()
   }
 
   /**
    * Initialize the model service
    */
   async initialize() {
-    try {
-      this.loadModels()
-      this.setupFileWatcher()
-      logger.success('✅ Model service initialized successfully')
-    } catch (error) {
-      logger.error('❌ Failed to initialize model service:', error)
-    }
+    const totalModels = Object.values(this.supportedModels).reduce(
+      (sum, config) => sum + config.models.length,
+      0
+    )
+    logger.success(`✅ Model service initialized with ${totalModels} models`)
   }
 
   /**
-   * Load the supported model configuration
-   */
-  loadModels() {
-    try {
-      if (fs.existsSync(this.modelsFile)) {
-        const data = fs.readFileSync(this.modelsFile, 'utf8')
-        this.supportedModels = JSON.parse(data)
-        const totalModels = Object.values(this.supportedModels).reduce(
-          (sum, config) => sum + config.models.length,
-          0
-        )
-        logger.info(`📋 Loaded ${totalModels} supported models from configuration`)
-      } else {
-        logger.warn('⚠️ Supported models file not found, using defaults')
-        this.supportedModels = this.getDefaultModels()
-        // Create the default configuration file
-        this.saveDefaultConfig()
-      }
-    } catch (error) {
-      logger.error('❌ Failed to load supported models:', error)
-      this.supportedModels = this.getDefaultModels()
-    }
-  }
-
-  /**
-   * Get the default model configuration (fallback)
+   * Get the supported model configuration
    */
   getDefaultModels() {
     return {
@@ -64,6 +30,8 @@ class ModelService {
         provider: 'anthropic',
         description: 'Claude models from Anthropic',
         models: [
+          'claude-opus-4-5-20251101',
+          'claude-haiku-4-5-20251001',
           'claude-sonnet-4-5-20250929',
           'claude-opus-4-1-20250805',
           'claude-sonnet-4-20250514',
@@ -79,55 +47,22 @@ class ModelService {
         provider: 'openai',
         description: 'OpenAI GPT models',
         models: [
-          'gpt-4o',
-          'gpt-4o-mini',
-          'gpt-4.1',
-          'gpt-4.1-mini',
-          'gpt-4.1-nano',
-          'gpt-4-turbo',
-          'gpt-4',
-          'gpt-3.5-turbo',
-          'o3',
-          'o4-mini',
-          'chatgpt-4o-latest'
+          'gpt-5.1-2025-11-13',
+          'gpt-5.1-codex-mini',
+          'gpt-5.1-codex',
+          'gpt-5.1-codex-max',
+          'gpt-5-2025-08-07',
+          'gpt-5-codex'
         ]
       },
       gemini: {
         provider: 'google',
         description: 'Google Gemini models',
-        models: [
-          'gemini-1.5-pro',
-          'gemini-1.5-flash',
-          'gemini-2.0-flash',
-          'gemini-2.0-flash-exp',
-          'gemini-2.0-flash-thinking',
-          'gemini-2.0-flash-thinking-exp',
-          'gemini-2.0-pro',
-          'gemini-2.5-flash',
-          'gemini-2.5-flash-lite',
-          'gemini-2.5-pro'
-        ]
+        models: ['gemini-2.5-pro', 'gemini-3-pro-preview', 'gemini-2.5-flash']
       }
     }
   }
 
-  /**
-   * Save the default configuration to a file
-   */
-  saveDefaultConfig() {
-    try {
-      const dataDir = path.dirname(this.modelsFile)
-      if (!fs.existsSync(dataDir)) {
-        fs.mkdirSync(dataDir, { recursive: true })
-      }
-      fs.writeFileSync(this.modelsFile, JSON.stringify(this.supportedModels, null, 2))
-      logger.info('💾 Created default supported_models.json configuration')
-    } catch (error) {
-      logger.error('❌ Failed to save default config:', error)
-    }
-  }
-
   /**
    * Get all supported models (OpenAI API format)
    */
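Read off the added lines above, the hardcoded defaults now look roughly like this; Claude entries outside the hunks are elided rather than guessed.

// Approximate shape of getDefaultModels() after this commit
getDefaultModels() {
  return {
    claude: {
      provider: 'anthropic',
      description: 'Claude models from Anthropic',
      models: [
        'claude-opus-4-5-20251101',
        'claude-haiku-4-5-20251001',
        'claude-sonnet-4-5-20250929'
        // ...remaining Claude entries are unchanged by this commit
      ]
    },
    openai: {
      provider: 'openai',
      description: 'OpenAI GPT models',
      models: [
        'gpt-5.1-2025-11-13',
        'gpt-5.1-codex-mini',
        'gpt-5.1-codex',
        'gpt-5.1-codex-max',
        'gpt-5-2025-08-07',
        'gpt-5-codex'
      ]
    },
    gemini: {
      provider: 'google',
      description: 'Google Gemini models',
      models: ['gemini-2.5-pro', 'gemini-3-pro-preview', 'gemini-2.5-flash']
    }
  }
}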
@@ -183,83 +118,27 @@ class ModelService {
     return model ? model.owned_by : null
   }
 
-  /**
-   * Reload the model configuration
-   */
-  reloadModels() {
-    logger.info('🔄 Reloading supported models configuration...')
-    this.loadModels()
-  }
-
-  /**
-   * Set up a file watcher (watch for configuration file changes)
-   */
-  setupFileWatcher() {
-    try {
-      // If a watcher already exists, close it first
-      if (this.fileWatcher) {
-        this.fileWatcher.close()
-        this.fileWatcher = null
-      }
-
-      // Only set up the watcher when the file exists
-      if (!fs.existsSync(this.modelsFile)) {
-        logger.debug('📋 Models file does not exist yet, skipping file watcher setup')
-        return
-      }
-
-      // Use fs.watchFile to watch for file changes
-      const watchOptions = {
-        persistent: true,
-        interval: 60000 // check once every 60 seconds
-      }
-
-      let lastMtime = fs.statSync(this.modelsFile).mtimeMs
-      fs.watchFile(this.modelsFile, watchOptions, (curr, _prev) => {
-        if (curr.mtimeMs !== lastMtime) {
-          lastMtime = curr.mtimeMs
-          logger.info('📋 Detected change in supported_models.json, reloading...')
-          this.reloadModels()
-        }
-      })
-
-      // Keep a reference so it can be cleaned up later
-      this.fileWatcher = {
-        close: () => fs.unwatchFile(this.modelsFile)
-      }
-
-      logger.info('👁️ File watcher set up for supported_models.json')
-    } catch (error) {
-      logger.error('❌ Failed to setup file watcher:', error)
-    }
-  }
-
   /**
    * Get the service status
    */
   getStatus() {
-    const totalModels = this.supportedModels
-      ? Object.values(this.supportedModels).reduce((sum, config) => sum + config.models.length, 0)
-      : 0
+    const totalModels = Object.values(this.supportedModels).reduce(
+      (sum, config) => sum + config.models.length,
+      0
+    )
 
     return {
-      initialized: this.supportedModels !== null,
+      initialized: true,
       totalModels,
-      providers: this.supportedModels ? Object.keys(this.supportedModels) : [],
-      fileExists: fs.existsSync(this.modelsFile)
+      providers: Object.keys(this.supportedModels)
     }
   }
 
   /**
-   * Clean up resources
+   * Clean up resources (interface kept for compatibility)
    */
   cleanup() {
-    if (this.fileWatcher) {
-      this.fileWatcher.close()
-      this.fileWatcher = null
-      logger.debug('📋 Model service file watcher closed')
-    }
+    logger.debug('📋 Model service cleanup (no-op)')
   }
 }
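With the file loading and watcher removed, the service is purely in-memory. A minimal usage sketch under that assumption: the module is required as a singleton (as the routes file does), and getModelProvider's return values are inferred from the provider fields above rather than shown in this diff.

// Minimal usage sketch of the simplified ModelService
const modelService = require('../services/modelService')

async function demo() {
  // initialize() now only logs the count of hardcoded models; no file read, no watcher
  await modelService.initialize()

  // Assumed to resolve against the provider fields of the hardcoded map
  console.log(modelService.getModelProvider('gemini-3-pro-preview')) // expected: 'google'

  console.log(modelService.getStatus())
  // -> { initialized: true, totalModels: <count>, providers: ['claude', 'openai', 'gemini'] }

  modelService.cleanup() // now a no-op apart from a debug log
}

demo().catch(console.error)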