🔐 feat: Implement Entra ID authentication for Azure OpenAI integration

- Added support for Entra ID authentication in OpenAIClient and related services.
- Updated header management to conditionally use Entra ID access tokens or API keys based on environment configuration.
- Introduced utility functions for Entra ID token retrieval and credential management.
- Enhanced tests to verify Entra ID authentication flow and its integration with Azure configurations.
Authored by victorbjorkgren on 2025-09-12 17:29:43 +02:00; committed by victorbjor
parent c40554c03b
commit f7a20624c2
9 changed files with 212 additions and 18 deletions
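
For anyone enabling this, the feature hangs on one environment toggle plus a conditional header. A minimal sketch of how the new utilities are meant to be consumed — buildAzureAuthHeaders is a hypothetical name used only for illustration; shouldUseEntraId and getEntraIdAccessToken are the exports this commit adds to @librechat/api:

// Sketch, not part of the diff. AZURE_OPENAI_USE_ENTRA_ID=true switches Azure OpenAI
// auth from the static api-key header to a Bearer token from DefaultAzureCredential.
import { shouldUseEntraId, getEntraIdAccessToken } from '@librechat/api';

async function buildAzureAuthHeaders(apiKey: string): Promise<Record<string, string>> {
  if (shouldUseEntraId()) {
    // Fresh Entra ID token scoped to Azure Cognitive Services
    return { Authorization: `Bearer ${await getEntraIdAccessToken()}` };
  }
  // Default path: unchanged api-key authentication
  return { 'api-key': apiKey };
}

The same if/else appears inline in each client and initializer below; a helper along these lines could centralize it, but the commit keeps the branches local to each call site.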

View file

@@ -6,8 +6,10 @@ const {
Tokenizer,
createFetch,
resolveHeaders,
shouldUseEntraId,
constructAzureURL,
getModelMaxTokens,
getEntraIdAccessToken,
genAzureChatCompletion,
getModelMaxOutputTokens,
createStreamEventHandlers,
@@ -614,7 +616,7 @@ class OpenAIClient extends BaseClient {
return (reply ?? '').trim();
}
initializeLLM({
async initializeLLM({
model = openAISettings.model.default,
modelName,
temperature = 0.2,
@@ -753,7 +755,14 @@ class OpenAIClient extends BaseClient {
this.options.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
this.options.headers['api-key'] = this.apiKey;
if (shouldUseEntraId()) {
this.options.headers = {
...this.options.headers,
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
};
} else {
this.options.headers['api-key'] = this.apiKey;
}
}
}
@@ -812,7 +821,7 @@ ${convo}
try {
this.abortController = new AbortController();
const llm = this.initializeLLM({
const llm = await this.initializeLLM({
...modelOptions,
conversationId,
context: 'title',
@@ -961,7 +970,7 @@ ${convo}
const initialPromptTokens = this.maxContextTokens - remainingContextTokens;
logger.debug('[OpenAIClient] initialPromptTokens', initialPromptTokens);
const llm = this.initializeLLM({
const llm = await this.initializeLLM({
model,
temperature: 0.2,
context: 'summary',
@@ -1187,7 +1196,14 @@ ${convo}
this.options.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
this.options.headers['api-key'] = this.apiKey;
if (shouldUseEntraId()) {
this.options.headers = {
...this.options.headers,
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
};
} else {
this.options.headers['api-key'] = this.apiKey;
}
}
}
@@ -1208,7 +1224,14 @@ ${convo}
: this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];
opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion };
opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey };
if (shouldUseEntraId()) {
opts.defaultHeaders = {
...opts.defaultHeaders,
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
};
} else {
opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey };
}
}
if (this.isOmni === true && modelOptions.max_tokens != null) {

View file

@@ -1,4 +1,4 @@
const { isUserProvided } = require('@librechat/api');
const { isUserProvided, shouldUseEntraId } = require('@librechat/api');
const { EModelEndpoint } = require('librechat-data-provider');
const { generateConfig } = require('~/server/utils/handleText');
@@ -17,23 +17,28 @@ const {
AZURE_ASSISTANTS_BASE_URL,
} = process.env ?? {};
// Note: For Entra ID, we can't determine the actual token here since this module is synchronous.
// The actual token is obtained in the initialize functions.
// Still, we need to set a placeholder key to avoid errors.
const finalAzureOpenAIApiKey = shouldUseEntraId() ? 'entra-id-placeholder' : azureOpenAIApiKey;
const useAzurePlugins = !!PLUGINS_USE_AZURE;
const userProvidedOpenAI = useAzurePlugins
? isUserProvided(azureOpenAIApiKey)
? isUserProvided(finalAzureOpenAIApiKey)
: isUserProvided(openAIApiKey);
module.exports = {
config: {
openAIApiKey,
azureOpenAIApiKey,
azureOpenAIApiKey: finalAzureOpenAIApiKey,
useAzurePlugins,
userProvidedOpenAI,
googleKey,
[EModelEndpoint.anthropic]: generateConfig(anthropicApiKey),
[EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken),
[EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY),
[EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL),
[EModelEndpoint.azureOpenAI]: generateConfig(finalAzureOpenAIApiKey, AZURE_OPENAI_BASEURL),
[EModelEndpoint.assistants]: generateConfig(
assistantsApiKey,
ASSISTANTS_BASE_URL,
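
The note above describes a two-phase arrangement: this config module loads synchronously, so it can only publish a placeholder, and the real credential is attached later by the async initializers. Compressed into a sketch (not the project's code; AZURE_API_KEY is the existing variable the tests below use):

import { shouldUseEntraId } from '@librechat/api';

// Phase 1 — synchronous config load: publish a placeholder so the downstream
// isUserProvided/generateConfig calls still receive a defined key.
export const azureOpenAIApiKey: string | undefined = shouldUseEntraId()
  ? 'entra-id-placeholder'
  : process.env.AZURE_API_KEY;

// Phase 2 — the async initializers (later files in this diff) never send the
// placeholder; they call getEntraIdAccessToken() and attach
// Authorization: Bearer <token> instead.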

View file

@@ -1,6 +1,12 @@
const OpenAI = require('openai');
const { ProxyAgent } = require('undici');
const { constructAzureURL, isUserProvided, resolveHeaders } = require('@librechat/api');
const {
constructAzureURL,
isUserProvided,
resolveHeaders,
shouldUseEntraId,
getEntraIdAccessToken,
} = require('@librechat/api');
const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider');
const {
checkUserKeyExpiry,
@@ -108,12 +114,19 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
azureOptions,
});
apiKey = azureOptions.azureOpenAIApiKey;
// For Entra ID, we need to get the actual access token
if (shouldUseEntraId()) {
apiKey = 'entra-id-placeholder';
headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
} else {
apiKey = azureOptions.azureOpenAIApiKey;
headers['api-key'] = apiKey;
}
opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion };
opts.defaultHeaders = resolveHeaders({
headers: {
...headers,
'api-key': apiKey,
'OpenAI-Beta': `assistants=${version}`,
},
user: req.user,
@@ -137,7 +150,11 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
clientOptions.headers['api-key'] = apiKey;
if (shouldUseEntraId()) {
clientOptions.headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
} else {
clientOptions.headers['api-key'] = apiKey;
}
}
}
}
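
This assistants-style initializer threads the credential through the OpenAI SDK client options (defaultQuery/defaultHeaders, both visible in the hunk above) rather than per-request settings. A sketch of the resulting client under Entra ID; the baseURL and the assistants version string are illustrative placeholders, not values from the diff:

import OpenAI from 'openai';
import { getEntraIdAccessToken } from '@librechat/api';

// The SDK still expects an apiKey string, so the placeholder is passed while the
// real credential travels in the Authorization header.
const client = new OpenAI({
  apiKey: 'entra-id-placeholder',
  baseURL: 'https://my-instance.openai.azure.com/openai', // illustrative Azure base URL
  defaultQuery: { 'api-version': '2024-12-01-preview' },
  defaultHeaders: {
    Authorization: `Bearer ${await getEntraIdAccessToken()}`,
    'OpenAI-Beta': 'assistants=v2', // illustrative version value
  },
});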

View file

@@ -6,6 +6,8 @@ const {
getOpenAIConfig,
getAzureCredentials,
createHandleLLMNewToken,
shouldUseEntraId,
getEntraIdAccessToken,
} = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const OpenAIClient = require('~/app/clients/OpenAIClient');
@@ -106,11 +108,25 @@ const initializeClient = async ({
clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
? { 'api-version': azureOptions.azureOpenAIApiVersion }
: undefined;
clientOptions.headers['api-key'] = apiKey;
if (shouldUseEntraId()) {
clientOptions.headers = {
...clientOptions.headers,
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
};
} else {
clientOptions.headers['api-key'] = apiKey;
}
}
} else if (isAzureOpenAI) {
clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
apiKey = clientOptions.azure.azureOpenAIApiKey;
if (shouldUseEntraId()) {
clientOptions.headers = {
...clientOptions.headers,
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
};
apiKey = 'entra-id-placeholder';
}
}
/** @type {undefined | TBaseEndpoint} */

View file

@@ -428,4 +428,29 @@ describe('initializeClient', () => {
expect(result.openAIApiKey).toBe('test');
expect(result.client.options.reverseProxyUrl).toBe('https://user-provided-url.com');
});
test('should use Entra ID authentication when AZURE_OPENAI_USE_ENTRA_ID is enabled', async () => {
process.env.AZURE_OPENAI_USE_ENTRA_ID = 'true';
process.env.AZURE_API_KEY = 'test-azure-api-key';
process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'test-instance';
process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'test-deployment';
process.env.AZURE_OPENAI_API_VERSION = '2024-12-01-preview';
const req = {
body: {
key: null,
endpoint: EModelEndpoint.azureOpenAI,
model: 'gpt-4-vision-preview',
},
user: { id: '123' },
app: { locals: {} },
config: mockAppConfig,
};
const res = {};
const endpointOption = {};
const result = await initializeClient({ req, res, endpointOption });
expect(result.openAIApiKey).toBeTruthy();
});
});
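
The new test toggles AZURE_OPENAI_USE_ENTRA_ID and asserts only that a key comes back, so the token acquisition itself is not verified here. If the Entra ID branch ever needs to run in tests without contacting Azure, a credential mock along these lines would do it — a sketch assuming Jest, not something this diff adds:

// Stub DefaultAzureCredential so getEntraIdAccessToken() resolves a canned token.
jest.mock('@azure/identity', () => ({
  DefaultAzureCredential: jest.fn().mockImplementation(() => ({
    getToken: jest.fn().mockResolvedValue({
      token: 'fake-entra-token',
      expiresOnTimestamp: Date.now() + 60 * 60 * 1000, // one hour out
    }),
  })),
}));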

View file

@@ -1326,4 +1326,22 @@ describe('getOpenAIConfig', () => {
});
});
});
describe('Entra ID Authentication', () => {
it('should handle Entra ID authentication in Azure configuration', () => {
const azure = {
azureOpenAIApiInstanceName: 'test-instance',
azureOpenAIApiDeploymentName: 'test-deployment',
azureOpenAIApiVersion: '2023-05-15',
azureOpenAIApiKey: 'entra-id-placeholder',
};
const result = getOpenAIConfig(mockApiKey, { azure });
expect(result.llmConfig).toMatchObject({
...azure,
model: 'test-deployment',
});
});
});
});

View file

@@ -6,7 +6,7 @@ import type {
UserKeyValues,
} from '~/types';
import { createHandleLLMNewToken } from '~/utils/generators';
import { getAzureCredentials } from '~/utils/azure';
import { getAzureCredentials, getEntraIdAccessToken, shouldUseEntraId } from '~/utils/azure';
import { isUserProvided } from '~/utils/common';
import { resolveHeaders } from '~/utils/env';
import { getOpenAIConfig } from './config';
@@ -110,12 +110,30 @@ export const initializeOpenAI = async ({
if (!clientOptions.headers) {
clientOptions.headers = {};
}
clientOptions.headers['api-key'] = apiKey;
if (shouldUseEntraId()) {
clientOptions.headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
} else {
clientOptions.headers['api-key'] = apiKey || '';
}
} else {
apiKey = azureOptions.azureOpenAIApiKey || '';
clientOptions.azure = azureOptions;
if (shouldUseEntraId()) {
apiKey = 'entra-id-placeholder';
clientOptions.headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
}
}
} else if (isAzureOpenAI) {
clientOptions.azure =
userProvidesKey && userValues?.apiKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
apiKey = clientOptions.azure ? clientOptions.azure.azureOpenAIApiKey : undefined;
if (shouldUseEntraId()) {
clientOptions.headers = {
...clientOptions.headers,
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
};
} else {
apiKey = clientOptions.azure ? clientOptions.azure.azureOpenAIApiKey : undefined;
}
}
if (userProvidesKey && !apiKey) {

View file

@@ -1,5 +1,7 @@
import { isEnabled } from './common';
import type { AzureOptions, GenericClient } from '~/types';
import { DefaultAzureCredential } from '@azure/identity';
import { logger } from '@librechat/data-schemas';
/**
* Sanitizes the model name to be used in the URL by removing or replacing disallowed characters.
@@ -118,3 +120,43 @@ export function constructAzureURL({
return finalURL;
}
/**
* Checks if Entra ID authentication should be used, based on the AZURE_OPENAI_USE_ENTRA_ID environment variable.
* @returns {boolean} True if Entra ID authentication should be used
*/
export const shouldUseEntraId = (): boolean => {
return process.env.AZURE_OPENAI_USE_ENTRA_ID === 'true';
};
/**
* Creates an Azure credential for Entra ID authentication.
* Uses DefaultAzureCredential which supports multiple authentication methods:
* - Managed Identity (when running in Azure)
* - Service Principal (when environment variables are set)
* - Azure CLI (for local development)
* - Visual Studio Code (for local development)
*
* @returns DefaultAzureCredential instance
*/
export const createEntraIdCredential = (): DefaultAzureCredential => {
return new DefaultAzureCredential();
};
/**
* Gets an access token for Entra ID authentication via @azure/identity.
* @returns {Promise<string>} The access token string
*/
export const getEntraIdAccessToken = async (): Promise<string> => {
try {
const credential = createEntraIdCredential();
const tokenResponse = await credential.getToken('https://cognitiveservices.azure.com/.default');
return tokenResponse.token;
} catch (error) {
logger.error('[ENTRA_ID_DEBUG] Failed to get Entra ID access token:', error);
throw error;
}
};
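
As written, getEntraIdAccessToken constructs a new DefaultAzureCredential and requests a token on every call, and the clients above await it on each initialization. If that ever becomes a hot path, the AccessToken returned by getToken carries expiresOnTimestamp, which makes a small cache straightforward — a sketch under that assumption, not part of this commit:

import { DefaultAzureCredential } from '@azure/identity';

const credential = new DefaultAzureCredential(); // reuse a single credential instance
let cached: { token: string; expiresOnTimestamp: number } | undefined;

// Serve the cached token while it is still comfortably valid (2-minute safety margin).
export async function getCachedEntraIdAccessToken(): Promise<string> {
  const marginMs = 2 * 60 * 1000;
  if (!cached || cached.expiresOnTimestamp - marginMs <= Date.now()) {
    cached = await credential.getToken('https://cognitiveservices.azure.com/.default');
  }
  return cached.token;
}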

View file

@@ -842,3 +842,33 @@ describe('mapGroupToAzureConfig', () => {
}).toThrow(`Group named "${groupName}" not found in configuration.`);
});
});
describe('Entra ID Authentication', () => {
it('should handle Entra ID placeholder in Azure configuration', () => {
const configs = [
{
group: 'entra-id-group',
apiKey: 'entra-id-placeholder',
instanceName: 'entra-instance',
deploymentName: 'entra-deployment',
version: '2024-12-01-preview',
models: {
'gpt-4': {
deploymentName: 'gpt-4-deployment',
version: '2024-12-01-preview',
},
},
},
];
const { isValid, modelNames, modelGroupMap, groupMap } = validateAzureGroups(configs);
expect(isValid).toBe(true);
expect(modelNames).toEqual(['gpt-4']);
const { azureOptions } = mapModelToAzureConfig({
modelName: 'gpt-4',
modelGroupMap,
groupMap,
});
expect(azureOptions.azureOpenAIApiKey).toBe('entra-id-placeholder');
});
});