LibreChat/api/server/services/Config/loadAsyncEndpoints.js
Danny Avila 097a978e5b
🅰️ feat: Azure Config to Allow Different Deployments per Model (#1863)
* wip: first pass for azure endpoint schema

* refactor: azure config to return groupMap and modelConfigMap

* wip: naming and schema changes

* refactor(errorsToString): move to data-provider

* feat: rename to azureGroups, add additional tests covering all expected outcomes, return errors

* feat(AppService): load Azure groups

* refactor(azure): use imported types, write `mapModelToAzureConfig`

* refactor: move `extractEnvVariable` to data-provider

* refactor(validateAzureGroups): throw on duplicate groups or models; feat(mapModelToAzureConfig): throw if env vars not present, add tests

* refactor(AppService): ensure each model is properly configured on startup

* refactor: deprecate azureOpenAI environment variables in favor of librechat.yaml config

* feat: use helper functions to handle and order enabled/default endpoints; initialize azureOpenAI from config file

* refactor: redefine types as well as load azureOpenAI models from config file

* chore(ci): fix test description naming

* feat(azureOpenAI): use validated model grouping for request authentication

* chore: bump data-provider following rebase

* chore: bump config file version noting significant changes

* feat: add title options and switch azure configs for titling and vision requests

* feat: enable azure plugins from config file

* fix(ci): pass tests

* chore(.env.example): mark `PLUGINS_USE_AZURE` as deprecated

* fix(fetchModels): early return if apiKey not passed

* chore: fix azure config typing

* refactor(mapModelToAzureConfig): return baseURL and headers as well as azureOptions

* feat(createLLM): use `azureOpenAIBasePath`

* feat(parsers): resolveHeaders

* refactor(extractBaseURL): handle invalid input

* feat(OpenAIClient): handle headers and baseURL for azureConfig

* fix(ci): pass `OpenAIClient` tests

* chore: extract env var for azureOpenAI group config, baseURL

* docs: azureOpenAI config setup docs

* feat: safe check of potential conflicting env vars that map to unique placeholders

* fix: reset apiKey when the model switches from the originally requested model (vision or title)

* chore: linting

* docs: CONFIG_PATH notes in custom_config.md
2024-02-26 14:12:25 -05:00
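
For orientation, the per-model deployment idea described in this commit can be pictured as a lookup from a model name to its own Azure options. The shape, option names, and helper below are illustrative assumptions, not the project's actual librechat.yaml schema or `mapModelToAzureConfig` API.

// Illustrative sketch only: resolving a requested model to its own Azure
// deployment settings. Names and shape are assumptions, not LibreChat's schema.
const exampleModelConfigMap = {
  'gpt-4-turbo': {
    group: 'eastus-group',
    azureOptions: {
      azureOpenAIApiInstanceName: 'my-instance',
      azureOpenAIApiDeploymentName: 'gpt-4-turbo-deployment',
      azureOpenAIApiVersion: '2023-12-01-preview',
    },
  },
};

function mapModelToExampleConfig(modelName) {
  const entry = exampleModelConfigMap[modelName];
  if (!entry) {
    throw new Error(`Model "${modelName}" is not configured for Azure`);
  }
  return entry;
}

// e.g. mapModelToExampleConfig('gpt-4-turbo').azureOptions.azureOpenAIApiDeploymentName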

54 lines
1.5 KiB
JavaScript

const { EModelEndpoint } = require('librechat-data-provider');
const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
const { availableTools } = require('~/app/clients/tools');
const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, googleKey } =
  require('./EndpointService').config;

/**
 * Load async endpoints and return a configuration object
 * @param {Express.Request} req - The request object
 */
async function loadAsyncEndpoints(req) {
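  // `i` is a throwaway counter: it is incremented below but never otherwise read.
  // `serviceKey` is loaded from ~/data/auth.json when that file exists (used
  // below to enable the Google endpoint); the failed require is ignored otherwise.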
  let i = 0;
  let serviceKey, googleUserProvides;
  try {
    serviceKey = require('~/data/auth.json');
  } catch (e) {
    if (i === 0) {
      i++;
    }
  }
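
  // When the configured Google key is the literal string 'user_provided',
  // end users must supply their own key for the Google endpoint.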
  if (googleKey === 'user_provided') {
    googleUserProvides = true;
    if (i <= 1) {
      i++;
    }
  }
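
  // Resolve the available plugin tools, including any defined via OpenAPI specs.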
  const tools = await addOpenAPISpecs(availableTools);

  function transformToolsToMap(tools) {
    return tools.reduce((map, obj) => {
      map[obj.pluginKey] = obj.name;
      return map;
    }, {});
  }
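
  // Build a { pluginKey: name } lookup of the available tools,
  // e.g. { 'plugin-key': 'Plugin Display Name' } (illustrative shape only).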
  const plugins = transformToolsToMap(tools);
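
  // The Google endpoint is offered when either a service key file or a
  // `googleKey` value is configured; `userProvide` tells the client whether
  // users must supply their own key.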
  const google = serviceKey || googleKey ? { userProvide: googleUserProvides } : false;
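
  // `useAzure` reflects the azureOpenAI `plugins` flag loaded into app.locals
  // from the librechat.yaml config at startup (see AppService in this PR).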
  const useAzure = req.app.locals[EModelEndpoint.azureOpenAI]?.plugins;
  const gptPlugins =
    useAzure || openAIApiKey || azureOpenAIApiKey
      ? {
          plugins,
          availableAgents: ['classic', 'functions'],
          userProvide: useAzure ? false : userProvidedOpenAI,
          azure: useAzurePlugins || useAzure,
        }
      : false;

  return { google, gptPlugins };
}

module.exports = loadAsyncEndpoints;
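
For context, a minimal sketch of how a route handler could expose this loader's result; the route path, file layout, and response shape are assumptions for illustration, not LibreChat's actual wiring.

// Hypothetical usage sketch (not part of this file): an Express route that
// returns the async endpoint configuration to the client.
const express = require('express');
const loadAsyncEndpoints = require('./loadAsyncEndpoints'); // assumed relative path

const router = express.Router();

router.get('/', async (req, res) => {
  // `google` is `{ userProvide }` or false; `gptPlugins` carries the plugin
  // map, available agents, and azure flags, or is false when unavailable.
  const { google, gptPlugins } = await loadAsyncEndpoints(req);
  res.json({ google, gptPlugins });
});

module.exports = router;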