diff --git a/src/routes/azureOpenaiRoutes.js b/src/routes/azureOpenaiRoutes.js
index a0f9b344..5de9117f 100644
--- a/src/routes/azureOpenaiRoutes.js
+++ b/src/routes/azureOpenaiRoutes.js
@@ -157,6 +157,16 @@ router.post('/chat/completions', authenticateApiKey, async (req, res) => {
     messages: req.body.messages?.length || 0
   })
 
+  // Detailed logging for debugging - INCLUDING SENSITIVE DATA
+  logger.debug(`📋 Azure OpenAI Chat Request Details ${requestId}`, {
+    completeHeaders: req.headers,
+    fullRequestBody: req.body,
+    clientIP: req.ip || req.connection?.remoteAddress,
+    method: req.method,
+    originalUrl: req.originalUrl,
+    query: req.query
+  })
+
   try {
     // Get the bound Azure OpenAI account
     let account = null
@@ -250,26 +260,14 @@ router.post('/responses', authenticateApiKey, async (req, res) => {
     messages: req.body.messages?.length || 0
   })
 
-  // Detailed logging for debugging
+  // Detailed logging for debugging - INCLUDING SENSITIVE DATA
   logger.debug(`📋 Azure OpenAI Responses Request Details ${requestId}`, {
-    headers: {
-      'content-type': req.headers['content-type'],
-      'user-agent': req.headers['user-agent'],
-      'x-forwarded-for': req.headers['x-forwarded-for'],
-      'authorization': req.headers.authorization ? '[REDACTED]' : 'not present'
-    },
-    requestBody: {
-      model: req.body.model,
-      messages: req.body.messages,
-      stream: req.body.stream,
-      temperature: req.body.temperature,
-      max_tokens: req.body.max_tokens,
-      top_p: req.body.top_p,
-      frequency_penalty: req.body.frequency_penalty,
-      presence_penalty: req.body.presence_penalty,
-      stop: req.body.stop,
-      logit_bias: req.body.logit_bias
-    }
+    completeHeaders: req.headers,
+    fullRequestBody: req.body,
+    clientIP: req.ip || req.connection?.remoteAddress,
+    method: req.method,
+    originalUrl: req.originalUrl,
+    query: req.query
   })
 
   try {
@@ -378,6 +376,16 @@ router.post('/embeddings', authenticateApiKey, async (req, res) => {
     input: Array.isArray(req.body.input) ? req.body.input.length : 1
  })
 
+  // Detailed logging for debugging - INCLUDING SENSITIVE DATA
+  logger.debug(`📋 Azure OpenAI Embeddings Request Details ${requestId}`, {
+    completeHeaders: req.headers,
+    fullRequestBody: req.body,
+    clientIP: req.ip || req.connection?.remoteAddress,
+    method: req.method,
+    originalUrl: req.originalUrl,
+    query: req.query
+  })
+
   try {
     // Get the bound Azure OpenAI account
     let account = null
diff --git a/src/services/azureOpenaiRelayService.js b/src/services/azureOpenaiRelayService.js
index b23f2d48..dab115a6 100644
--- a/src/services/azureOpenaiRelayService.js
+++ b/src/services/azureOpenaiRelayService.js
@@ -52,24 +52,22 @@ async function handleAzureOpenAIRequest({
   // Process the request body
   const processedBody = { ...requestBody }
 
-  // Detailed logging for responses endpoint
-  if (endpoint === 'responses') {
-    logger.debug(`🔍 Azure OpenAI Responses Endpoint Details`, {
-      targetUrl: requestUrl,
-      headers: {
-        'Content-Type': requestHeaders['Content-Type'],
-        'api-key': '[REDACTED]',
-        'User-Agent': requestHeaders['User-Agent'] || 'not set'
-      },
-      processedRequestBody: processedBody,
-      account: {
-        name: account.name,
-        azureEndpoint: account.azureEndpoint,
-        deploymentName: account.deploymentName,
-        apiVersion: apiVersion
-      }
-    })
-  }
+  // Detailed logging for all endpoints - INCLUDING SENSITIVE DATA
+  logger.debug(`🔍 Azure OpenAI ${endpoint.toUpperCase()} Complete Request Details`, {
+    targetUrl: requestUrl,
+    completeHeaders: requestHeaders,
+    fullProcessedRequestBody: processedBody,
+    account: {
+      id: account.id,
+      name: account.name,
+      azureEndpoint: account.azureEndpoint,
+      deploymentName: account.deploymentName,
+      apiVersion: apiVersion,
+      apiKeyPreview: account.apiKey ? account.apiKey.substring(0, 10) + '...' : 'not set'
+    },
+    endpoint: endpoint,
+    originalRequestBody: requestBody
+  })
 
   // Normalize the model name
   if (processedBody.model) {
@@ -129,18 +127,13 @@ async function handleAzureOpenAIRequest({
     requestBodySize: JSON.stringify(processedBody).length
   })
 
-  logger.debug('Azure OpenAI request headers', {
-    'content-type': requestHeaders['Content-Type'],
-    'user-agent': requestHeaders['user-agent'] || 'not-set',
-    customHeaders: Object.keys(requestHeaders).filter(
-      (key) => !['Content-Type', 'user-agent'].includes(key)
-    )
+  logger.debug('Azure OpenAI Complete Request Headers - INCLUDING SENSITIVE DATA', {
+    allHeaders: requestHeaders
   })
 
-  logger.debug('Azure OpenAI request body', {
-    model: processedBody.model,
-    messages: processedBody.messages?.length || 0,
-    otherParams: Object.keys(processedBody).filter((key) => !['model', 'messages'].includes(key))
+  logger.debug('Azure OpenAI Complete Request Body - INCLUDING SENSITIVE DATA', {
+    fullRequestBody: processedBody,
+    requestBodySize: JSON.stringify(processedBody).length
   })
 
   const requestStartTime = Date.now()
@@ -161,24 +154,17 @@ async function handleAzureOpenAIRequest({
       contentType: response.headers?.['content-type'] || 'unknown'
     })
 
-    // Enhanced logging for responses endpoint
-    if (endpoint === 'responses') {
-      logger.debug(`🔍 Azure OpenAI Responses Endpoint Response`, {
-        status: response.status,
-        statusText: response.statusText,
-        headers: {
-          'content-type': response.headers['content-type'],
-          'x-request-id': response.headers['x-request-id'] || response.headers['x-ms-request-id'],
-          'x-ratelimit-remaining': response.headers['x-ratelimit-remaining-requests'],
-          'x-ratelimit-reset': response.headers['x-ratelimit-reset-requests']
-        },
-        responseBodyPreview: isStream ? '[STREAM]' : (
-          response.data ? JSON.stringify(response.data).substring(0, 500) + (JSON.stringify(response.data).length > 500 ? '...' : '') : 'no data'
-        ),
-        endpoint: 'responses',
-        duration: `${requestDuration}ms`
-      })
-    }
+    // Enhanced logging for all endpoints - INCLUDING SENSITIVE DATA
+    logger.debug(`🔍 Azure OpenAI ${endpoint.toUpperCase()} Complete Response`, {
+      status: response.status,
+      statusText: response.statusText,
+      completeResponseHeaders: response.headers,
+      fullResponseBody: isStream ? '[STREAM - Cannot capture stream data]' : response.data,
+      responseDataType: typeof response.data,
+      responseSize: response.headers['content-length'] || 'unknown',
+      endpoint: endpoint,
+      duration: `${requestDuration}ms`
+    })
 
     return response
   } catch (error) {
@@ -245,16 +231,25 @@ async function handleAzureOpenAIRequest({
      logger.error('Azure OpenAI Request Failed', errorDetails)
    }
 
-    // Enhanced error logging for responses endpoint
-    if (endpoint === 'responses') {
-      logger.error(`❌ Azure OpenAI Responses Endpoint Error`, {
-        ...errorDetails,
-        endpoint: 'responses',
-        targetUrl: requestUrl,
-        errorType: error.response ? 'HTTP_ERROR' : 'NETWORK_ERROR',
-        responseBody: error.response?.data ? JSON.stringify(error.response.data) : 'no response body'
-      })
-    }
+    // Enhanced error logging for all endpoints - INCLUDING SENSITIVE DATA
+    logger.error(`❌ Azure OpenAI ${endpoint.toUpperCase()} Complete Error Details`, {
+      ...errorDetails,
+      endpoint: endpoint,
+      targetUrl: requestUrl,
+      errorType: error.response ? 'HTTP_ERROR' : 'NETWORK_ERROR',
+      completeRequestHeaders: requestHeaders,
+      fullRequestBody: processedBody,
+      completeResponseHeaders: error.response?.headers,
+      fullResponseBody: error.response?.data,
+      account: {
+        id: account.id,
+        name: account.name,
+        azureEndpoint: account.azureEndpoint,
+        deploymentName: account.deploymentName,
+        apiVersion: account.apiVersion,
+        fullApiKey: account.apiKey
+      }
+    })
 
     throw error
   }