🅰️ feat: Azure Config to Allow Different Deployments per Model (#1863)

* wip: first pass for azure endpoint schema

* refactor: azure config to return groupMap and modelConfigMap

* wip: naming and schema changes

* refactor(errorsToString): move to data-provider

* feat: rename to azureGroups, add additional tests, tests all expected outcomes, return errors

* feat(AppService): load Azure groups

* refactor(azure): use imported types, write `mapModelToAzureConfig`

* refactor: move `extractEnvVariable` to data-provider

* refactor(validateAzureGroups): throw on duplicate groups or models; feat(mapModelToAzureConfig): throw if env vars not present, add tests

* refactor(AppService): ensure each model is properly configured on startup

* refactor: deprecate azureOpenAI environment variables in favor of librechat.yaml config

* feat: use helper functions to handle and order enabled/default endpoints; initialize azureOpenAI from config file

* refactor: redefine types as well as load azureOpenAI models from config file

* chore(ci): fix test description naming

* feat(azureOpenAI): use validated model grouping for request authentication

* chore: bump data-provider following rebase

* chore: bump config file version noting significant changes

* feat: add title options and switch azure configs for titling and vision requests

* feat: enable azure plugins from config file

* fix(ci): pass tests

* chore(.env.example): mark `PLUGINS_USE_AZURE` as deprecated

* fix(fetchModels): early return if apiKey not passed

* chore: fix azure config typing

* refactor(mapModelToAzureConfig): return baseURL and headers as well as azureOptions

* feat(createLLM): use `azureOpenAIBasePath`

* feat(parsers): resolveHeaders

* refactor(extractBaseURL): handle invalid input

* feat(OpenAIClient): handle headers and baseURL for azureConfig

* fix(ci): pass `OpenAIClient` tests

* chore: extract env var for azureOpenAI group config, baseURL

* docs: azureOpenAI config setup docs

* feat: safe check of potential conflicting env vars that map to unique placeholders

* fix: reset apiKey when model switches from originally requested model (vision or title)

* chore: linting

* docs: CONFIG_PATH notes in custom_config.md
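
Taken together, the commits above add an `azureOpenAI` endpoint section to librechat.yaml whose groups are validated at startup and resolved to a concrete deployment per requested model. A minimal sketch of that flow follows; only the inputs and outputs of `mapModelToAzureConfig` are confirmed by the diff below, so the group fields and the return shape of `validateAzureGroups` shown here are assumptions for illustration.

const { validateAzureGroups, mapModelToAzureConfig } = require('librechat-data-provider');

// Hypothetical parsed form of an `endpoints.azureOpenAI.groups` entry from librechat.yaml.
// Field names are assumed; '${EASTUS_API_KEY}' stands for an environment variable placeholder.
const groups = [
  {
    group: 'eastus',
    apiKey: '${EASTUS_API_KEY}',
    instanceName: 'my-eastus-instance',
    version: '2023-12-01-preview',
    models: {
      'gpt-4-vision-preview': { deploymentName: 'gpt-4-vision' },
      'gpt-3.5-turbo': { deploymentName: 'gpt-35-turbo' },
    },
  },
];

// At startup (AppService), groups are validated and turned into lookup maps;
// duplicate groups or models are expected to throw, per the commits above (return shape assumed).
const { isValid, modelGroupMap, groupMap, errors } = validateAzureGroups(groups);

// At request time, the requested model name resolves to its deployment credentials,
// optional baseURL, and headers; missing env vars are expected to throw.
const { azureOptions, baseURL, headers } = mapModelToAzureConfig({
  modelName: 'gpt-4-vision-preview',
  modelGroupMap,
  groupMap,
});

The same mapping is reused in the diff below whenever the client switches models for titling or vision requests, which is why the API key, headers, and base URL are reset after each switch.
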
Danny Avila, 2024-02-26 14:12:25 -05:00, committed by GitHub
parent 7a55132e42
commit 097a978e5b
37 changed files with 2066 additions and 394 deletions


@@ -1,10 +1,13 @@
 const OpenAI = require('openai');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const {
+ImageDetail,
+EModelEndpoint,
+resolveHeaders,
+ImageDetailCost,
 getResponseSender,
 validateVisionModel,
-ImageDetailCost,
-ImageDetail,
+mapModelToAzureConfig,
 } = require('librechat-data-provider');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
@@ -665,6 +668,16 @@ class OpenAIClient extends BaseClient {
 };
 }
+const { headers } = this.options;
+if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
+configOptions.baseOptions = {
+headers: resolveHeaders({
+...headers,
+...configOptions?.baseOptions?.headers,
+}),
+};
+}
 if (this.options.proxy) {
 configOptions.httpAgent = new HttpsProxyAgent(this.options.proxy);
 configOptions.httpsAgent = new HttpsProxyAgent(this.options.proxy);
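
The hunk above merges any configured endpoint headers into the OpenAI client options through resolveHeaders. A rough sketch of the behavior this implies, assuming resolveHeaders substitutes `${ENV_VAR}` placeholders (in line with the extractEnvVariable commits above) and passes other values through unchanged:

const { resolveHeaders } = require('librechat-data-provider');

process.env.ORG_ID = 'org-123'; // hypothetical variable, used only for illustration

const headers = resolveHeaders({
  'OpenAI-Organization': '${ORG_ID}', // placeholder expected to be replaced from the environment
  'x-custom-header': 'static-value', // plain values expected to pass through unchanged
});
// Expected result (assumed): { 'OpenAI-Organization': 'org-123', 'x-custom-header': 'static-value' }
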
@@ -725,6 +738,26 @@
 max_tokens: 16,
 };
+/** @type {TAzureConfig | undefined} */
+const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
+if (this.azure && azureConfig) {
+const { modelGroupMap, groupMap } = azureConfig;
+const {
+azureOptions,
+baseURL,
+headers = {},
+} = mapModelToAzureConfig({
+modelName: modelOptions.model,
+modelGroupMap,
+groupMap,
+});
+this.azure = azureOptions;
+this.options.headers = resolveHeaders(headers);
+this.options.reverseProxyUrl = baseURL ?? null;
+this.langchainProxy = extractBaseURL(this.options.reverseProxyUrl);
+this.apiKey = azureOptions.azureOpenAIApiKey;
+}
 const titleChatCompletion = async () => {
 modelOptions.model = model;
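
In the titling path above, the title request may run against a different Azure group than the original request, so the Azure options, headers, reverse proxy URL, and API key are all re-derived before the call. The extractBaseURL helper is expected to reduce a full completion URL to the base path downstream clients accept and to tolerate invalid input, roughly as sketched here (both the behavior and the '~/utils' import path are assumptions, not read from the helper):

const { extractBaseURL } = require('~/utils'); // hypothetical import path

extractBaseURL('https://localhost:8080/v1/chat/completions'); // => 'https://localhost:8080/v1' (assumed)
extractBaseURL(undefined); // => null rather than throwing (assumed)
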
@@ -975,6 +1008,27 @@ ${convo}
 modelOptions.max_tokens = 4000;
 }
+/** @type {TAzureConfig | undefined} */
+const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
+if (this.azure && this.isVisionModel && azureConfig) {
+const { modelGroupMap, groupMap } = azureConfig;
+const {
+azureOptions,
+baseURL,
+headers = {},
+} = mapModelToAzureConfig({
+modelName: modelOptions.model,
+modelGroupMap,
+groupMap,
+});
+this.azure = azureOptions;
+this.azureEndpoint = genAzureChatCompletion(this.azure, modelOptions.model, this);
+opts.defaultHeaders = resolveHeaders(headers);
+this.langchainProxy = extractBaseURL(baseURL);
+this.apiKey = azureOptions.azureOpenAIApiKey;
+}
 if (this.azure || this.options.azure) {
 // Azure does not accept `model` in the body, so we need to remove it.
 delete modelOptions.model;
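
The vision branch above rebuilds this.azureEndpoint from the re-mapped group via genAzureChatCompletion (which, per the diff, takes the azure options, the model name, and the client). A hypothetical stand-in for the URL it is expected to produce, assuming the standard Azure OpenAI chat completions shape and these azureOptions field names (only azureOpenAIApiKey is confirmed by the diff):

// Hypothetical illustration only, not the real helper.
const buildAzureChatCompletionURL = ({
  azureOpenAIApiInstanceName,
  azureOpenAIApiDeploymentName,
  azureOpenAIApiVersion,
}) =>
  `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/` +
  `${azureOpenAIApiDeploymentName}/chat/completions?api-version=${azureOpenAIApiVersion}`;
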
@@ -1026,12 +1080,20 @@ ${convo}
 ...modelOptions,
 ...this.options.addParams,
 };
+logger.debug('[OpenAIClient] chatCompletion: added params', {
+addParams: this.options.addParams,
+modelOptions,
+});
 }
 if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
 this.options.dropParams.forEach((param) => {
 delete modelOptions[param];
 });
+logger.debug('[OpenAIClient] chatCompletion: dropped params', {
+dropParams: this.options.dropParams,
+modelOptions,
+});
 }
 let UnexpectedRoleError = false;