Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-18 01:10:14 +01:00)
* wip: first pass for azure endpoint schema
* refactor: azure config to return groupMap and modelConfigMap
* wip: naming and schema changes
* refactor(errorsToString): move to data-provider
* feat: rename to azureGroups, add additional tests, tests all expected outcomes, return errors
* feat(AppService): load Azure groups
* refactor(azure): use imported types, write `mapModelToAzureConfig`
* refactor: move `extractEnvVariable` to data-provider
* refactor(validateAzureGroups): throw on duplicate groups or models; feat(mapModelToAzureConfig): throw if env vars not present, add tests
* refactor(AppService): ensure each model is properly configured on startup
* refactor: deprecate azureOpenAI environment variables in favor of librechat.yaml config
* feat: use helper functions to handle and order enabled/default endpoints; initialize azureOpenAI from config file
* refactor: redefine types as well as load azureOpenAI models from config file
* chore(ci): fix test description naming
* feat(azureOpenAI): use validated model grouping for request authentication
* chore: bump data-provider following rebase
* chore: bump config file version noting significant changes
* feat: add title options and switch azure configs for titling and vision requests
* feat: enable azure plugins from config file
* fix(ci): pass tests
* chore(.env.example): mark `PLUGINS_USE_AZURE` as deprecated
* fix(fetchModels): early return if apiKey not passed
* chore: fix azure config typing
* refactor(mapModelToAzureConfig): return baseURL and headers as well as azureOptions
* feat(createLLM): use `azureOpenAIBasePath`
* feat(parsers): resolveHeaders
* refactor(extractBaseURL): handle invalid input
* feat(OpenAIClient): handle headers and baseURL for azureConfig
* fix(ci): pass `OpenAIClient` tests
* chore: extract env var for azureOpenAI group config, baseURL
* docs: azureOpenAI config setup docs
* feat: safe check of potential conflicting env vars that map to unique placeholders
* fix: reset apiKey when model switches from originally requested model (vision or title)
* chore: linting
* docs: CONFIG_PATH notes in custom_config.md
57 lines
1.7 KiB
JavaScript
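Note on conventions used in the file below: the commit series above refers to `extractEnvVariable` and `isUserProvided`, which the loader relies on to resolve `${VAR}` placeholders and to detect values the end user must supply. What follows is a minimal sketch of that convention, assuming a `${VAR}` placeholder syntax and a literal `user_provided` sentinel string; it is an illustration of the idea, not the actual implementation of either helper.

// Sketch of the assumed placeholder convention (illustrative, not the real helpers)
const resolvePlaceholder = (value) => {
  // '${SOME_VAR}' resolves to process.env.SOME_VAR when that variable is set
  const match = /^\$\{(.+)\}$/.exec(value ?? '');
  return match && process.env[match[1]] != null ? process.env[match[1]] : value;
};

// The literal string 'user_provided' is assumed to mean "the user supplies this at runtime"
const isLeftToUser = (value) => value === 'user_provided';

// Hypothetical values for illustration only
process.env.EXAMPLE_KEY = 'sk-123';
console.log(resolvePlaceholder('${EXAMPLE_KEY}')); // 'sk-123'
console.log(isLeftToUser('user_provided'));        // true -> endpoint marked as user-provided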
const { EModelEndpoint, extractEnvVariable } = require('librechat-data-provider');
const { isUserProvided } = require('~/server/utils');
const getCustomConfig = require('./getCustomConfig');

/**
 * Load config endpoints from the cached configuration object
 * @param {Express.Request} req - The request object
 * @returns {Promise<TEndpointsConfig>} A promise that resolves to an object containing the endpoints configuration
 */
async function loadConfigEndpoints(req) {
  const customConfig = await getCustomConfig();

  if (!customConfig) {
    return {};
  }

  const { endpoints = {} } = customConfig ?? {};
  const endpointsConfig = {};

  if (Array.isArray(endpoints[EModelEndpoint.custom])) {
    const customEndpoints = endpoints[EModelEndpoint.custom].filter(
      (endpoint) =>
        endpoint.baseURL &&
        endpoint.apiKey &&
        endpoint.name &&
        endpoint.models &&
        (endpoint.models.fetch || endpoint.models.default),
    );

    for (let i = 0; i < customEndpoints.length; i++) {
      const endpoint = customEndpoints[i];
      const { baseURL, apiKey, name, iconURL, modelDisplayLabel } = endpoint;

      const resolvedApiKey = extractEnvVariable(apiKey);
      const resolvedBaseURL = extractEnvVariable(baseURL);

      endpointsConfig[name] = {
        type: EModelEndpoint.custom,
        userProvide: isUserProvided(resolvedApiKey),
        userProvideURL: isUserProvided(resolvedBaseURL),
        modelDisplayLabel,
        iconURL,
      };
    }
  }

  if (req.app.locals[EModelEndpoint.azureOpenAI]) {
    /** @type {Omit<TConfig, 'order'>} */
    endpointsConfig[EModelEndpoint.azureOpenAI] = {
      userProvide: false,
    };
  }

  return endpointsConfig;
}

module.exports = loadConfigEndpoints;
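For reference, a rough sketch of the output this loader produces for a hypothetical custom endpoint entry. The endpoint name, env var, URLs, and model list below are made up for illustration, and the exact `TEndpointsConfig` shape should be confirmed against `librechat-data-provider`.

// Hypothetical parsed librechat.yaml entry under endpoints.custom (values are illustrative)
const exampleEndpoint = {
  name: 'Mistral',
  apiKey: '${MISTRAL_API_KEY}', // resolved through extractEnvVariable before the user-provided check
  baseURL: 'https://api.mistral.ai/v1',
  models: { default: ['mistral-tiny'], fetch: true },
  iconURL: 'https://example.com/mistral.png',
  modelDisplayLabel: 'Mistral',
};

// With that entry present (and the env var set), endpointsConfig.Mistral would come out roughly as:
// {
//   type: 'custom',            // EModelEndpoint.custom
//   userProvide: false,        // true only if the resolved apiKey is the user-provided sentinel
//   userProvideURL: false,     // likewise for the resolved baseURL
//   modelDisplayLabel: 'Mistral',
//   iconURL: 'https://example.com/mistral.png',
// }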