Fix Azure OpenAI usage count issue

This commit is contained in:
Feng Yue
2025-08-30 20:45:01 +08:00
parent 70c8cb5aff
commit dc3d311def
2 changed files with 324 additions and 29 deletions

View File

@@ -197,6 +197,13 @@ router.post('/chat/completions', authenticateApiKey, async (req, res) => {
onEnd: async ({ usageData, actualModel }) => {
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
logger.info(`✅ Usage capture SUCCESS for stream chat request ${requestId}`, {
usageData,
modelToRecord,
inputTokens: usageData.prompt_tokens || usageData.input_tokens || 0,
outputTokens: usageData.completion_tokens || usageData.output_tokens || 0
})
await usageReporter.reportOnce(
requestId,
usageData,
@@ -204,6 +211,14 @@ router.post('/chat/completions', authenticateApiKey, async (req, res) => {
modelToRecord,
account.id
)
} else {
logger.error(`❌ Usage capture FAILED for stream chat request ${requestId}`, {
apiKeyId: req.apiKey.id,
model: req.body.model,
account: account.name,
endpoint: 'chat/completions',
isStream: true
})
}
},
onError: (error) => {
@@ -219,6 +234,13 @@ router.post('/chat/completions', authenticateApiKey, async (req, res) => {
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
logger.info(`✅ Usage capture SUCCESS for non-stream chat request ${requestId}`, {
usageData,
modelToRecord,
inputTokens: usageData.prompt_tokens || usageData.input_tokens || 0,
outputTokens: usageData.completion_tokens || usageData.output_tokens || 0
})
await usageReporter.reportOnce(
requestId,
usageData,
@@ -226,6 +248,15 @@ router.post('/chat/completions', authenticateApiKey, async (req, res) => {
modelToRecord,
account.id
)
} else {
logger.error(`❌ Usage capture FAILED for non-stream chat request ${requestId}`, {
apiKeyId: req.apiKey.id,
model: req.body.model,
account: account.name,
endpoint: 'chat/completions',
isStream: false,
responseStatus: response.status
})
}
}
} catch (error) {
@@ -314,6 +345,13 @@ router.post('/responses', authenticateApiKey, async (req, res) => {
onEnd: async ({ usageData, actualModel }) => {
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
logger.info(`✅ Usage capture SUCCESS for stream responses request ${requestId}`, {
usageData,
modelToRecord,
inputTokens: usageData.prompt_tokens || usageData.input_tokens || 0,
outputTokens: usageData.completion_tokens || usageData.output_tokens || 0
})
await usageReporter.reportOnce(
requestId,
usageData,
@@ -321,6 +359,14 @@ router.post('/responses', authenticateApiKey, async (req, res) => {
modelToRecord,
account.id
)
} else {
logger.error(`❌ Usage capture FAILED for stream responses request ${requestId}`, {
apiKeyId: req.apiKey.id,
model: req.body.model,
account: account.name,
endpoint: 'responses',
isStream: true
})
}
},
onError: (error) => {
@@ -336,6 +382,13 @@ router.post('/responses', authenticateApiKey, async (req, res) => {
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
logger.info(`✅ Usage capture SUCCESS for non-stream responses request ${requestId}`, {
usageData,
modelToRecord,
inputTokens: usageData.prompt_tokens || usageData.input_tokens || 0,
outputTokens: usageData.completion_tokens || usageData.output_tokens || 0
})
await usageReporter.reportOnce(
requestId,
usageData,
@@ -343,6 +396,15 @@ router.post('/responses', authenticateApiKey, async (req, res) => {
modelToRecord,
account.id
)
} else {
logger.error(`❌ Usage capture FAILED for non-stream responses request ${requestId}`, {
apiKeyId: req.apiKey.id,
model: req.body.model,
account: account.name,
endpoint: 'responses',
isStream: false,
responseStatus: response.status
})
}
}
} catch (error) {
@@ -418,7 +480,23 @@ router.post('/embeddings', authenticateApiKey, async (req, res) => {
if (usageData) {
const modelToRecord = actualModel || req.body.model || 'unknown'
logger.info(`✅ Usage capture SUCCESS for embeddings request ${requestId}`, {
usageData,
modelToRecord,
inputTokens: usageData.prompt_tokens || usageData.input_tokens || 0,
outputTokens: usageData.completion_tokens || usageData.output_tokens || 0
})
await usageReporter.reportOnce(requestId, usageData, req.apiKey.id, modelToRecord, account.id)
} else {
logger.error(`❌ Usage capture FAILED for embeddings request ${requestId}`, {
apiKeyId: req.apiKey.id,
model: req.body.model,
account: account.name,
endpoint: 'embeddings',
isStream: false,
responseStatus: response.status
})
}
} catch (error) {
logger.error(`Azure OpenAI embeddings request failed ${requestId}:`, error)