LibreChat/api/server/services/Endpoints/openAI/initialize.js
Danny Avila a058963a9f
👤 feat: User Placeholder Variables for Custom Endpoint Headers (#7993)
* 🔧 refactor: move `processMCPEnv` from `librechat-data-provider` to `@librechat/api`

* 🔧 refactor: Update resolveHeaders import paths

* 🔧 refactor: Enhance resolveHeaders to support user and custom variables

- Updated the `resolveHeaders` function to accept user and custom user variables for placeholder replacement (a rough sketch of the substitution follows this list).
- Modified header resolution in multiple client and controller files to utilize the enhanced resolveHeaders functionality.
- Added comprehensive tests for resolveHeaders to ensure correct processing of user and custom variables.
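
A rough sketch of the substitution described above (illustrative only, not the actual `@librechat/api` implementation; the `{{LIBRECHAT_USER_ID}}` placeholder name and the `customUserVars` shape are assumptions here):

// Illustrative sketch; the real logic lives in resolveHeaders within @librechat/api.
function resolveHeadersSketch(headers = {}, user = {}, customUserVars = {}) {
  // Assumed placeholder mapping; custom user variables merge in as additional substitutions.
  const substitutions = {
    '{{LIBRECHAT_USER_ID}}': user.id ?? '',
    ...customUserVars,
  };
  const resolved = {};
  for (const [key, value] of Object.entries(headers)) {
    // Replace every occurrence of each placeholder in the header value.
    resolved[key] = Object.entries(substitutions).reduce(
      (acc, [placeholder, replacement]) => acc.split(placeholder).join(replacement),
      String(value),
    );
  }
  return resolved;
}

// Example: { Authorization: 'Bearer {{LIBRECHAT_USER_ID}}' } resolves the user's id into the header value.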

* 🔧 fix: Update user ID placeholder processing in env.ts

* 🔧 fix: Remove arguments that passed `this.user` rather than `req.user`

- Updated multiple client and controller files to call resolveHeaders without the user parameter

* 🔧 refactor: Enhance processUserPlaceholders to be more readable / less nested

* 🔧 refactor: Update processUserPlaceholders to pass all tests in mcp.spec.ts and env.spec.ts

* chore: remove legacy ChatGPTClient

* chore: remove LLM initialization code

* chore: initial deprecation removal of `gptPlugins`

* chore: remove cohere-ai dependency from package.json and package-lock.json

* chore: update brace-expansion to version 2.0.2 and add license information

* chore: remove PluginsClient test file

* chore: remove legacy

* ci: remove deprecated sendMessage/getCompletion/chatCompletion tests

---------

Co-authored-by: Dustin Healy <54083382+dustinhealy@users.noreply.github.com>
2025-06-23 12:39:27 -04:00


const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider');
const {
  isEnabled,
  resolveHeaders,
  isUserProvided,
  getOpenAIConfig,
  getAzureCredentials,
  createHandleLLMNewToken,
} = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const OpenAIClient = require('~/app/clients/OpenAIClient');
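
/**
 * Initializes an OpenAIClient for the OpenAI / Azure OpenAI endpoints,
 * or returns only the resolved LLM config when `optionsOnly` is set.
 */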
const initializeClient = async ({
  req,
  res,
  endpointOption,
  optionsOnly,
  overrideEndpoint,
  overrideModel,
}) => {
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    OPENAI_REVERSE_PROXY,
    AZURE_OPENAI_BASEURL,
    OPENAI_SUMMARIZE,
    DEBUG_OPENAI,
  } = process.env;

  const { key: expiresAt } = req.body;
  const modelName = overrideModel ?? req.body.model;
  const endpoint = overrideEndpoint ?? req.body.endpoint;
  const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null;

  const credentials = {
    [EModelEndpoint.openAI]: OPENAI_API_KEY,
    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
  };

  const baseURLOptions = {
    [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY,
    [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL,
  };
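
  // `isUserProvided` flags env values configured to defer to per-user credentials (e.g. `user_provided`).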
  const userProvidesKey = isUserProvided(credentials[endpoint]);
  const userProvidesURL = isUserProvided(baseURLOptions[endpoint]);
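
  // When the key or base URL is user-provided, load the stored per-user values and check that the key has not expired.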
  let userValues = null;
  if (expiresAt && (userProvidesKey || userProvidesURL)) {
    checkUserKeyExpiry(expiresAt, endpoint);
    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
  }

  let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint];
  let baseURL = userProvidesURL ? userValues?.baseURL : baseURLOptions[endpoint];

  let clientOptions = {
    contextStrategy,
    proxy: PROXY ?? null,
    debug: isEnabled(DEBUG_OPENAI),
    reverseProxyUrl: baseURL ? baseURL : null,
    ...endpointOption,
  };
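
  // Azure OpenAI: resolve credentials, headers, and parameters from the configured model-group mapping when present.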
  const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
  /** @type {false | TAzureConfig} */
  const azureConfig = isAzureOpenAI && req.app.locals[EModelEndpoint.azureOpenAI];
  if (isAzureOpenAI && azureConfig) {
    const { modelGroupMap, groupMap } = azureConfig;
    const {
      azureOptions,
      baseURL,
      headers = {},
      serverless,
    } = mapModelToAzureConfig({
      modelName,
      modelGroupMap,
      groupMap,
    });

    clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
    clientOptions.headers = resolveHeaders(
      { ...headers, ...(clientOptions.headers ?? {}) },
      req.user,
    );

    clientOptions.titleConvo = azureConfig.titleConvo;
    clientOptions.titleModel = azureConfig.titleModel;

    const azureRate = modelName.includes('gpt-4') ? 30 : 17;
    clientOptions.streamRate = azureConfig.streamRate ?? azureRate;
    clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';

    const groupName = modelGroupMap[modelName].group;
    clientOptions.addParams = azureConfig.groupMap[groupName].addParams;
    clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams;
    clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt;

    apiKey = azureOptions.azureOpenAIApiKey;
    clientOptions.azure = !serverless && azureOptions;
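    // Serverless Azure deployments authenticate via the `api-key` header and an `api-version` query parameter.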
    if (serverless === true) {
      clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
        ? { 'api-version': azureOptions.azureOpenAIApiVersion }
        : undefined;
      clientOptions.headers['api-key'] = apiKey;
    }
  } else if (isAzureOpenAI) {
    clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
    apiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  /** @type {undefined | TBaseEndpoint} */
  const openAIConfig = req.app.locals[EModelEndpoint.openAI];
  if (!isAzureOpenAI && openAIConfig) {
    clientOptions.streamRate = openAIConfig.streamRate;
    clientOptions.titleModel = openAIConfig.titleModel;
  }

  /** @type {undefined | TBaseEndpoint} */
  const allConfig = req.app.locals.all;
  if (allConfig) {
    clientOptions.streamRate = allConfig.streamRate;
  }

  if (userProvidesKey && !apiKey) {
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.NO_USER_KEY,
      }),
    );
  }

  if (!apiKey) {
    throw new Error(`${endpoint} API Key not provided.`);
  }
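
  // `optionsOnly`: return the resolved LLM config (with an optional stream-rate callback) instead of a client instance.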
  if (optionsOnly) {
    const modelOptions = endpointOption.model_parameters;
    modelOptions.model = modelName;
    clientOptions = Object.assign({ modelOptions }, clientOptions);
    clientOptions.modelOptions.user = req.user.id;
    const options = getOpenAIConfig(apiKey, clientOptions);
    const streamRate = clientOptions.streamRate;
    if (!streamRate) {
      return options;
    }
    options.llmConfig.callbacks = [
      {
        handleLLMNewToken: createHandleLLMNewToken(streamRate),
      },
    ];
    return options;
  }
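
  // Standard path: construct the full OpenAIClient with the resolved API key and options.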
  const client = new OpenAIClient(apiKey, Object.assign({ req, res }, clientOptions));
  return {
    client,
    openAIApiKey: apiKey,
  };
};

module.exports = initializeClient;