diff --git a/.eslintrc.cjs b/.eslintrc.cjs
index f8c79f9c..30281309 100644
--- a/.eslintrc.cjs
+++ b/.eslintrc.cjs
@@ -14,6 +14,7 @@ module.exports = {
   rules: {
     // 基础规则
     'no-console': 'off', // Node.js 项目允许 console
+    'consistent-return': 'off',
     'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'warn',
     'prettier/prettier': 'error',
 
@@ -33,7 +34,6 @@ module.exports = {
     // 代码质量
     eqeqeq: ['error', 'always'],
     curly: ['error', 'all'],
-    'consistent-return': 'error',
     'no-throw-literal': 'error',
     'prefer-promise-reject-errors': 'error',
 
@@ -43,7 +43,6 @@ module.exports = {
     'template-curly-spacing': ['error', 'never'],
 
     // Node.js 特定规则
-    'no-process-exit': 'error',
     'no-path-concat': 'error',
     'handle-callback-err': 'error',
 
diff --git a/src/app.js b/src/app.js
index f33b8a63..9af1a91e 100644
--- a/src/app.js
+++ b/src/app.js
@@ -19,6 +19,7 @@ const apiStatsRoutes = require('./routes/apiStats')
 const geminiRoutes = require('./routes/geminiRoutes')
 const openaiGeminiRoutes = require('./routes/openaiGeminiRoutes')
 const openaiClaudeRoutes = require('./routes/openaiClaudeRoutes')
+const openaiRoutes = require('./routes/openaiRoutes')
 
 // Import middleware
 const {
@@ -234,6 +235,7 @@ class Application {
     this.app.use('/gemini', geminiRoutes)
     this.app.use('/openai/gemini', openaiGeminiRoutes)
     this.app.use('/openai/claude', openaiClaudeRoutes)
+    this.app.use('/openai', openaiRoutes)
 
     // 🏠 根路径重定向到新版管理界面
     this.app.get('/', (req, res) => {
@@ -257,9 +259,6 @@ class Application {
     let version = process.env.APP_VERSION || process.env.VERSION
     if (!version) {
       try {
-        // 尝试从VERSION文件读取
-        const fs = require('fs')
-        const path = require('path')
         const versionFile = path.join(__dirname, '..', 'VERSION')
         if (fs.existsSync(versionFile)) {
           version = fs.readFileSync(versionFile, 'utf8').trim()
diff --git a/src/routes/openaiRoutes.js b/src/routes/openaiRoutes.js
new file mode 100644
index 00000000..3751121f
--- /dev/null
+++ b/src/routes/openaiRoutes.js
@@ -0,0 +1,119 @@
+const express = require('express')
+const axios = require('axios')
+const router = express.Router()
+const logger = require('../utils/logger')
+const { authenticateApiKey } = require('../middleware/auth')
+const redis = require('../models/redis')
+const claudeAccountService = require('../services/claudeAccountService')
+
+// 选择一个可用的 OpenAI 账户,并返回解密后的 accessToken
+async function getOpenAIAuthToken() {
+  try {
+    const accounts = await redis.getAllOpenAIAccounts()
+    if (!accounts || accounts.length === 0) {
+      throw new Error('No OpenAI accounts found in Redis')
+    }
+
+    // 简单选择策略:选择第一个启用并活跃的账户
+    const candidate =
+      accounts.find((a) => String(a.enabled) === 'true' && String(a.isActive) === 'true') ||
+      accounts[0]
+
+    if (!candidate || !candidate.accessToken) {
+      throw new Error('No valid OpenAI account with accessToken')
+    }
+
+    const accessToken = claudeAccountService._decryptSensitiveData(candidate.accessToken)
+    if (!accessToken) {
+      throw new Error('Failed to decrypt OpenAI accessToken')
+    }
+    return { accessToken, accountId: candidate.accountId || 'unknown' }
+  } catch (error) {
+    logger.error('Failed to get OpenAI auth token from Redis:', error)
+    throw error
+  }
+}
+
+router.post('/responses', authenticateApiKey, async (req, res) => {
+  let upstream = null
+  try {
+    const { accessToken, accountId } = await getOpenAIAuthToken()
+    // 基于白名单构造上游所需的请求头,确保键为小写且值受控
+    const incoming = req.headers || {}
+
+    const allowedKeys = ['version', 'openai-beta', 'session_id']
+
+    const headers = {}
+    for (const key of allowedKeys) {
+      if (incoming[key] !== undefined) {
+        headers[key] = incoming[key]
+      }
+    }
+
+    // 覆盖或新增必要头部
+    headers['authorization'] = `Bearer ${accessToken}`
+    headers['chatgpt-account-id'] = accountId
+    headers['host'] = 'chatgpt.com'
+    headers['accept'] = 'text/event-stream'
+    headers['content-type'] = 'application/json'
+    req.body['store'] = false
+    // 使用流式转发,保持与上游一致
+    upstream = await axios.post('https://chatgpt.com/backend-api/codex/responses', req.body, {
+      headers,
+      responseType: 'stream',
+      timeout: 60000,
+      validateStatus: () => true
+    })
+    res.status(upstream.status)
+    res.setHeader('Content-Type', 'text/event-stream')
+    res.setHeader('Cache-Control', 'no-cache')
+    res.setHeader('Connection', 'keep-alive')
+    res.setHeader('X-Accel-Buffering', 'no')
+
+    // 透传关键诊断头,避免传递不安全或与传输相关的头
+    const passThroughHeaderKeys = ['openai-version', 'x-request-id', 'openai-processing-ms']
+    for (const key of passThroughHeaderKeys) {
+      const val = upstream.headers?.[key]
+      if (val !== undefined) {
+        res.setHeader(key, val)
+      }
+    }
+
+    // 立即刷新响应头,开始 SSE
+    if (typeof res.flushHeaders === 'function') {
+      res.flushHeaders()
+    }
+
+    upstream.data.on('error', (err) => {
+      logger.error('Upstream stream error:', err)
+      if (!res.headersSent) {
+        res.status(502).json({ error: { message: 'Upstream stream error' } })
+      } else {
+        res.end()
+      }
+    })
+
+    upstream.data.pipe(res)
+
+    // 客户端断开时清理上游流
+    const cleanup = () => {
+      try {
+        upstream.data?.unpipe?.(res)
+        upstream.data?.destroy?.()
+      } catch (_) {
+        //
+      }
+    }
+    req.on('close', cleanup)
+    req.on('aborted', cleanup)
+  } catch (error) {
+    logger.error('Proxy to ChatGPT codex/responses failed:', error)
+    const status = error.response?.status || 500
+    const message = error.response?.data || error.message || 'Internal server error'
+    if (!res.headersSent) {
+      res.status(status).json({ error: { message } })
+    }
+  }
+})
+
+module.exports = router