🔐 feat: Implement Entra ID authentication for Azure OpenAI integration

- Added support for Entra ID authentication in OpenAIClient and related services.
- Updated header management to conditionally use Entra ID access tokens or API keys based on environment configuration.
- Introduced utility functions for Entra ID token retrieval and credential management.
- Enhanced tests to verify Entra ID authentication flow and its integration with Azure configurations.
victorbjorkgren authored 2025-09-12 17:29:43 +02:00 · committed by victorbjor
parent a1471c2f37 · commit 9288e84454
9 changed files with 212 additions and 18 deletions
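
The `shouldUseEntraId()` and `getEntraIdAccessToken()` helpers that the commit message mentions are imported from `@librechat/api` but not shown in this diff. A minimal sketch of what they could look like, assuming `@azure/identity` and the `AZURE_OPENAI_USE_ENTRA_ID` flag the new test uses (implementation details here are illustrative, not the actual code):

// Illustrative sketch only; the real helpers live in @librechat/api and may differ.
const { DefaultAzureCredential } = require('@azure/identity');

const credential = new DefaultAzureCredential();

// True when the deployment opts into Entra ID auth via environment configuration.
function shouldUseEntraId() {
  return process.env.AZURE_OPENAI_USE_ENTRA_ID === 'true';
}

// Fetches a bearer token for Azure OpenAI (Cognitive Services scope).
async function getEntraIdAccessToken() {
  const { token } = await credential.getToken('https://cognitiveservices.azure.com/.default');
  return token;
}

module.exports = { shouldUseEntraId, getEntraIdAccessToken };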

View file

@@ -1,4 +1,4 @@
-const { isUserProvided } = require('@librechat/api');
+const { isUserProvided, shouldUseEntraId } = require('@librechat/api');
 const { EModelEndpoint } = require('librechat-data-provider');
 const { generateConfig } = require('~/server/utils/handleText');
 
@@ -17,23 +17,28 @@ const {
   AZURE_ASSISTANTS_BASE_URL,
 } = process.env ?? {};
 
+// Note: For Entra ID, we can't determine the actual token here since this is synchronous.
+// The actual token will be obtained in the initialize functions.
+// Still, we need to set a placeholder token to avoid errors.
+const finalAzureOpenAIApiKey = shouldUseEntraId() ? 'entra-id-placeholder' : azureOpenAIApiKey;
+
 const useAzurePlugins = !!PLUGINS_USE_AZURE;
 
 const userProvidedOpenAI = useAzurePlugins
-  ? isUserProvided(azureOpenAIApiKey)
+  ? isUserProvided(finalAzureOpenAIApiKey)
   : isUserProvided(openAIApiKey);
 
 module.exports = {
   config: {
     openAIApiKey,
-    azureOpenAIApiKey,
+    azureOpenAIApiKey: finalAzureOpenAIApiKey,
     useAzurePlugins,
     userProvidedOpenAI,
     googleKey,
     [EModelEndpoint.anthropic]: generateConfig(anthropicApiKey),
     [EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken),
     [EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY),
-    [EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL),
+    [EModelEndpoint.azureOpenAI]: generateConfig(finalAzureOpenAIApiKey, AZURE_OPENAI_BASEURL),
     [EModelEndpoint.assistants]: generateConfig(
       assistantsApiKey,
       ASSISTANTS_BASE_URL,
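
The placeholder key matters because this module is synchronous: with Entra ID there is no static key in the environment, and an empty value would make the Azure endpoint look unconfigured. It also keeps the `isUserProvided` check meaningful, which in LibreChat is (roughly) a comparison against the literal 'user_provided' marker; a rough sketch, since the actual helper is in `@librechat/api`:

// Rough sketch of the check the config above relies on.
const isUserProvided = (value) => value === 'user_provided';

isUserProvided('entra-id-placeholder'); // false -> endpoint treated as app-configured
isUserProvided('user_provided');        // true  -> each user must supply their own key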

View file

@@ -1,6 +1,12 @@
 const OpenAI = require('openai');
 const { ProxyAgent } = require('undici');
-const { constructAzureURL, isUserProvided, resolveHeaders } = require('@librechat/api');
+const {
+  constructAzureURL,
+  isUserProvided,
+  resolveHeaders,
+  shouldUseEntraId,
+  getEntraIdAccessToken,
+} = require('@librechat/api');
 const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider');
 const {
   checkUserKeyExpiry,
@@ -108,12 +114,19 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
       azureOptions,
     });
 
-    apiKey = azureOptions.azureOpenAIApiKey;
+    // For Entra ID, we need to get the actual access token
+    if (shouldUseEntraId()) {
+      apiKey = 'entra-id-placeholder';
+      headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
+    } else {
+      apiKey = azureOptions.azureOpenAIApiKey;
+      headers['api-key'] = apiKey;
+    }
+
     opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion };
     opts.defaultHeaders = resolveHeaders({
       headers: {
         ...headers,
-        'api-key': apiKey,
         'OpenAI-Beta': `assistants=${version}`,
       },
       user: req.user,
@@ -137,7 +150,11 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
       clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
         ? { 'api-version': azureOptions.azureOpenAIApiVersion }
         : undefined;
-      clientOptions.headers['api-key'] = apiKey;
+      if (shouldUseEntraId()) {
+        clientOptions.headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
+      } else {
+        clientOptions.headers['api-key'] = apiKey;
+      }
     }
   }
 }
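
With Entra ID enabled, the assistants client authenticates through the `Authorization` header while the SDK's required `apiKey` stays a placeholder. A rough, self-contained sketch of the equivalent client construction with the `openai` Node SDK (the base URL and API version are illustrative):

// Sketch of the client shape the initializer builds under Entra ID (values illustrative).
const OpenAI = require('openai');
const { DefaultAzureCredential } = require('@azure/identity');

async function buildAzureAssistantsClient() {
  const credential = new DefaultAzureCredential();
  const { token } = await credential.getToken('https://cognitiveservices.azure.com/.default');
  return new OpenAI({
    apiKey: 'entra-id-placeholder', // the SDK requires a value; auth actually happens via the header below
    baseURL: 'https://my-instance.openai.azure.com/openai', // illustrative Azure endpoint
    defaultQuery: { 'api-version': '2024-12-01-preview' },
    defaultHeaders: { Authorization: `Bearer ${token}` },
  });
}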

View file

@@ -6,6 +6,8 @@ const {
   getOpenAIConfig,
   getAzureCredentials,
   createHandleLLMNewToken,
+  shouldUseEntraId,
+  getEntraIdAccessToken,
 } = require('@librechat/api');
 const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
 const OpenAIClient = require('~/app/clients/OpenAIClient');
@@ -106,11 +108,25 @@ const initializeClient = async ({
       clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
         ? { 'api-version': azureOptions.azureOpenAIApiVersion }
         : undefined;
-      clientOptions.headers['api-key'] = apiKey;
+      if (shouldUseEntraId()) {
+        clientOptions.headers = {
+          ...clientOptions.headers,
+          Authorization: `Bearer ${await getEntraIdAccessToken()}`,
+        };
+      } else {
+        clientOptions.headers['api-key'] = apiKey;
+      }
     }
   } else if (isAzureOpenAI) {
     clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
     apiKey = clientOptions.azure.azureOpenAIApiKey;
+    if (shouldUseEntraId()) {
+      clientOptions.headers = {
+        ...clientOptions.headers,
+        Authorization: `Bearer ${await getEntraIdAccessToken()}`,
+      };
+      apiKey = 'entra-id-placeholder';
+    }
   }
 
   /** @type {undefined | TBaseEndpoint} */
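
`getEntraIdAccessToken()` is awaited on every client initialization in both initializers. `DefaultAzureCredential` generally handles token renewal on its own, but if per-request fetches ever become a concern, a small expiry-aware cache is a common pattern; a hypothetical helper, not part of this commit:

// Hypothetical cache around the token fetch; not part of this commit.
const { DefaultAzureCredential } = require('@azure/identity');

const credential = new DefaultAzureCredential();
let cached = null; // AccessToken: { token, expiresOnTimestamp }

async function getCachedEntraIdToken() {
  const twoMinutes = 2 * 60 * 1000;
  // Refresh when missing or within two minutes of expiry.
  if (!cached || cached.expiresOnTimestamp - Date.now() < twoMinutes) {
    cached = await credential.getToken('https://cognitiveservices.azure.com/.default');
  }
  return cached.token;
}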

View file

@@ -428,4 +428,29 @@ describe('initializeClient', () => {
     expect(result.openAIApiKey).toBe('test');
     expect(result.client.options.reverseProxyUrl).toBe('https://user-provided-url.com');
   });
+
+  test('should use Entra ID authentication when AZURE_OPENAI_USE_ENTRA_ID is enabled', async () => {
+    process.env.AZURE_OPENAI_USE_ENTRA_ID = 'true';
+    process.env.AZURE_API_KEY = 'test-azure-api-key';
+    process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'test-instance';
+    process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'test-deployment';
+    process.env.AZURE_OPENAI_API_VERSION = '2024-12-01-preview';
+
+    const req = {
+      body: {
+        key: null,
+        endpoint: EModelEndpoint.azureOpenAI,
+        model: 'gpt-4-vision-preview',
+      },
+      user: { id: '123' },
+      app: { locals: {} },
+      config: mockAppConfig,
+    };
+
+    const res = {};
+    const endpointOption = {};
+
+    const result = await initializeClient({ req, res, endpointOption });
+
+    expect(result.openAIApiKey).toBeTruthy();
+  });
 });
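
Going by the variables the new test sets, enabling the flow in a deployment presumably comes down to environment configuration along these lines (values illustrative; exact requirements beyond the flag are not spelled out in this diff):

# Illustrative .env; variable names taken from the test above.
AZURE_OPENAI_USE_ENTRA_ID=true
AZURE_API_KEY=placeholder            # still set in the test, but not used for auth when Entra ID is on
AZURE_OPENAI_API_INSTANCE_NAME=my-instance
AZURE_OPENAI_API_DEPLOYMENT_NAME=my-deployment
AZURE_OPENAI_API_VERSION=2024-12-01-preview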