mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-18 01:10:14 +01:00
* chore: rename dir from `assistant` to plural * feat: `assistants` field for azure config, spread options in AppService * refactor: rename constructAzureURL param for azure as `azureOptions` * chore: bump openai and bun * chore(loadDefaultModels): change naming of assistant -> assistants * feat: load azure settings with correct baseURL for assistants' initializeClient * refactor: add `assistants` flags to groups and model configs, add mapGroupToAzureConfig * feat(loadConfigEndpoints): initialize assistants endpoint if azure flag `assistants` is enabled * feat(AppService): determine assistant models on startup, throw Error if none * refactor(useDeleteAssistantMutation): send model along with assistant id for delete mutations * feat: support listing and deleting assistants with azure * feat: add model query to assistant avatar upload * feat: add azure support for retrieveRun method * refactor: update OpenAIClient initialization * chore: update README * fix(ci): tests passing * refactor(uploadOpenAIFile): improve logging and use more efficient REST API method * refactor(useFileHandling): add model to metadata to target Azure region compatible with current model * chore(files): add azure naming pattern for valid file id recognition * fix(assistants): initialize openai with first available assistant model if none provided * refactor(uploadOpenAIFile): add content type for azure, initialize formdata before azure options * refactor(sleep): move sleep function out of Runs and into `~/server/utils` * fix(azureOpenAI/assistants): make sure to only overwrite models with assistant models if `assistants` flag is enabled * refactor(uploadOpenAIFile): revert to old method * chore(uploadOpenAIFile): use enum for file purpose * docs: azureOpenAI update guide with more info, examples * feat: enable/disable assistant capabilities and specify retrieval models * refactor: optional chain conditional statement in loadConfigModels.js * docs: add assistants examples * chore: update librechat.example.yaml * docs(azure): update note of file upload behavior in Azure OpenAI Assistants * chore: update docs and add descriptive message about assistant errors * fix: prevent message submission with invalid assistant or if files loading * style: update Landing icon & text when assistant is not selected * chore: bump librechat-data-provider to 0.4.8 * fix(assistants/azure): assign req.body.model for proper azure init to abort runs
148 lines
4.1 KiB
JavaScript
const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { EModelEndpoint, resolveHeaders, mapModelToAzureConfig } = require('librechat-data-provider');
const { getUserKey, getUserKeyExpiry, checkUserKeyExpiry } = require('~/server/services/UserService');
const OpenAIClient = require('~/app/clients/OpenAIClient');
const { isUserProvided } = require('~/server/utils');
const { constructAzureURL } = require('~/utils');

/**
 * Builds an OpenAI SDK instance (and, optionally, an OpenAIClient wrapper) for the
 * Assistants endpoint. Credentials come either from per-user stored values or from
 * environment variables; when the app's Azure OpenAI config enables assistants, the
 * key, base URL, headers, and deployment are remapped to the matching Azure group.
 *
 * @param {object} params
 * @param {object} params.req - Express request; supplies `req.user.id`, `req.app.locals`,
 *   and the requested model (`req.body.model` / `req.query.model`).
 * @param {object} params.res - Express response; attached to the SDK instance.
 * @param {object} [params.endpointOption] - Per-request endpoint options, spread into the client config.
 * @param {boolean} [params.initAppClient=false] - When true, also construct an OpenAIClient.
 * @returns {Promise<{ client?: OpenAIClient, openai: OpenAI, openAIApiKey: string }>}
 * @throws {Error} If stored user values are not valid JSON, or no API key can be resolved.
 */
const initializeClient = async ({ req, res, endpointOption, initAppClient = false }) => {
  const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;

  // Either credential may be flagged as "user_provided", i.e. stored per user.
  const keyIsUserProvided = isUserProvided(ASSISTANTS_API_KEY);
  const urlIsUserProvided = isUserProvided(ASSISTANTS_BASE_URL);

  let storedValues = null;
  if (keyIsUserProvided || urlIsUserProvided) {
    // Reject expired keys before attempting to read them.
    const expiresAt = await getUserKeyExpiry({
      userId: req.user.id,
      name: EModelEndpoint.assistants,
    });
    checkUserKeyExpiry(
      expiresAt,
      'Your Assistants API key has expired. Please provide your API key again.',
    );

    const rawValues = await getUserKey({ userId: req.user.id, name: EModelEndpoint.assistants });
    try {
      storedValues = JSON.parse(rawValues);
    } catch (e) {
      throw new Error(
        'Invalid JSON provided for Assistants API user values. Please provide them again.',
      );
    }
  }

  let apiKey = keyIsUserProvided ? storedValues.apiKey : ASSISTANTS_API_KEY;
  let baseURL = urlIsUserProvided ? storedValues.baseURL : ASSISTANTS_BASE_URL;

  // Options forwarded to the OpenAI SDK constructor.
  const sdkOptions = {};

  // Options for the OpenAIClient wrapper (used only when `initAppClient` is set).
  const clientOptions = {
    reverseProxyUrl: baseURL ?? null,
    proxy: PROXY ?? null,
    req,
    res,
    ...endpointOption,
  };

  /** @type {TAzureConfig | undefined} */
  const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];

  /** @type {AzureOptions | undefined} */
  let azureSettings;

  if (azureConfig?.assistants) {
    const { modelGroupMap, groupMap, assistantModels } = azureConfig;
    // Prefer the explicitly requested model; fall back to the first assistant-capable model.
    const modelName = req.body.model ?? req.query.model ?? assistantModels[0];
    const {
      azureOptions: mappedOptions,
      baseURL: azureBaseURL,
      headers = {},
      serverless,
    } = mapModelToAzureConfig({ modelName, modelGroupMap, groupMap });

    azureSettings = mappedOptions;

    // `${INSTANCE_NAME}` is a literal placeholder substituted by constructAzureURL.
    baseURL = constructAzureURL({
      baseURL: azureBaseURL ?? 'https://${INSTANCE_NAME}.openai.azure.com/openai',
      azureOptions: azureSettings,
    });

    apiKey = azureSettings.azureOpenAIApiKey;
    sdkOptions.defaultQuery = { 'api-version': azureSettings.azureOpenAIApiVersion };
    sdkOptions.defaultHeaders = resolveHeaders({ ...headers, 'api-key': apiKey });
    sdkOptions.model = azureSettings.azureOpenAIApiDeploymentName;

    if (initAppClient) {
      clientOptions.titleConvo = azureConfig.titleConvo;
      clientOptions.titleModel = azureConfig.titleModel;
      clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';

      const groupName = modelGroupMap[modelName].group;
      const { addParams, dropParams, forcePrompt } = azureConfig.groupMap[groupName];
      clientOptions.addParams = addParams;
      clientOptions.dropParams = dropParams;
      clientOptions.forcePrompt = forcePrompt;

      clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
      clientOptions.headers = sdkOptions.defaultHeaders;
      // Serverless deployments skip the azure-specific client config.
      clientOptions.azure = !serverless && azureSettings;
    }
  }

  if (!apiKey) {
    throw new Error('Assistants API key not provided. Please provide it again.');
  }

  if (baseURL) {
    sdkOptions.baseURL = baseURL;
  }

  if (PROXY) {
    sdkOptions.httpAgent = new HttpsProxyAgent(PROXY);
  }

  if (OPENAI_ORGANIZATION) {
    sdkOptions.organization = OPENAI_ORGANIZATION;
  }

  /** @type {OpenAIClient} */
  const openai = new OpenAI({ apiKey, ...sdkOptions });
  openai.req = req;
  openai.res = res;

  if (azureSettings) {
    // Downstream azure-aware calls (e.g. retrieveRun) read these from `openai.locals`.
    openai.locals = { ...(openai.locals ?? {}), azureOptions: azureSettings };
  }

  if (endpointOption && initAppClient) {
    const client = new OpenAIClient(apiKey, clientOptions);
    return { client, openai, openAIApiKey: apiKey };
  }

  return { openai, openAIApiKey: apiKey };
};

module.exports = initializeClient;