Conditional Azure OpenAI endpoint usage

This commit is contained in:
Kunal Karmakar
2026-02-28 04:33:09 +00:00
committed by Ayaan Zaidi
parent 06a3175cd1
commit 4ed12c18a0

View File

@@ -211,7 +211,7 @@ function resolveAliasError(params: {
return `Alias ${normalized} already points to ${existingKey}.`; return `Alias ${normalized} already points to ${existingKey}.`;
} }
function buildOpenAiHeaders(apiKey: string) { function buildAzureOpenAiHeaders(apiKey: string) {
const headers: Record<string, string> = {}; const headers: Record<string, string> = {};
if (apiKey) { if (apiKey) {
headers["api-key"] = apiKey; headers["api-key"] = apiKey;
@@ -219,6 +219,14 @@ function buildOpenAiHeaders(apiKey: string) {
return headers; return headers;
} }
/**
 * Builds the HTTP headers for a standard (non-Azure) OpenAI-compatible
 * endpoint. A `Bearer` Authorization header is attached only when an API
 * key was supplied; an empty key yields an empty header set.
 */
function buildOpenAiHeaders(apiKey: string) {
  // Conditional expression instead of post-construction mutation: the
  // header set is fully determined by whether a key is present.
  const headers: Record<string, string> = apiKey
    ? { Authorization: `Bearer ${apiKey}` }
    : {};
  return headers;
}
function buildAnthropicHeaders(apiKey: string) { function buildAnthropicHeaders(apiKey: string) {
const headers: Record<string, string> = { const headers: Record<string, string> = {
"anthropic-version": "2023-06-01", "anthropic-version": "2023-06-01",
@@ -311,16 +319,34 @@ async function requestOpenAiVerification(params: {
modelId: params.modelId, modelId: params.modelId,
endpointPath: "chat/completions", endpointPath: "chat/completions",
}); });
return await requestVerification({ const isBaseUrlAzureUrl = isAzureUrl(params.baseUrl);
endpoint, const headers = isBaseUrlAzureUrl
headers: buildOpenAiHeaders(params.apiKey), ? buildAzureOpenAiHeaders(params.apiKey)
body: { : buildOpenAiHeaders(params.apiKey);
messages: [{ role: "user", content: "Hi" }], if (isBaseUrlAzureUrl) {
temperature: 1, return await requestVerification({
max_completion_tokens: DEFAULT_MAX_TOKENS, endpoint,
stream: false, headers,
}, body: {
}); messages: [{ role: "user", content: "Hi" }],
temperature: 1,
max_completion_tokens: DEFAULT_MAX_TOKENS,
stream: false,
}
});
} else {
return await requestVerification({
endpoint,
headers,
body: {
model: params.modelId,
messages: [{ role: "user", content: "Hi" }],
temperature: 1,
max_tokens: 1,
stream: false,
}
});
}
} }
async function requestAnthropicVerification(params: { async function requestAnthropicVerification(params: {