👤 feat: User ID in Model Query; chore: cleanup ModelService (#1753)

* feat: send the LibreChat user ID as a query param when fetching the list of models

* chore: update bun

* chore: change bun command for building data-provider

* refactor: prefer use of `getCustomConfig` to access custom config, also move to `server/services/Config`

* refactor: make endpoints/custom option for the config optional, add userIdQuery, and use modelQueries log store in ModelService

* refactor(ModelService): use env variables at runtime, use default models from data-provider, and add tests

* docs: add `userIdQuery`

* fix(ci): import changed
Danny Avila, 2024-02-08 10:06:58 -05:00, committed by GitHub
parent d06e5d2e02
commit ff057152e2
17 changed files with 339 additions and 83 deletions
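
For context, the new `userIdQuery` option sits under a custom endpoint's `models` block in `librechat.yaml`. A sketch of the parsed config shape these changes expect, with placeholder provider name, base URL, and env variable:

```js
// Hypothetical parsed custom config (librechat.yaml); only the fields touched by this PR are shown.
const customConfig = {
  endpoints: {
    custom: [
      {
        name: 'ExampleProvider', // placeholder provider name
        apiKey: '${EXAMPLE_API_KEY}', // resolved at runtime via extractEnvVariable
        baseURL: 'https://api.example.com/v1', // placeholder base URL
        models: {
          fetch: true, // fetch the model list from the provider
          userIdQuery: true, // new: append the LibreChat user ID as a query param when fetching
        },
      },
    ],
  },
};
```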

View file

@@ -0,0 +1,25 @@
+const { CacheKeys } = require('librechat-data-provider');
+const loadCustomConfig = require('./loadCustomConfig');
+const getLogStores = require('~/cache/getLogStores');
+
+/**
+ * Retrieves the configuration object
+ * @function getCustomConfig
+ * @returns {Promise<TCustomConfig | null>}
+ * */
+async function getCustomConfig() {
+  const cache = getLogStores(CacheKeys.CONFIG_STORE);
+  let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG);
+
+  if (!customConfig) {
+    customConfig = await loadCustomConfig();
+  }
+
+  if (!customConfig) {
+    return null;
+  }
+
+  return customConfig;
+}
+
+module.exports = getCustomConfig;
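
The loaders updated below swap their inline cache lookups for this helper; a minimal consumer sketch of the same pattern:

```js
const getCustomConfig = require('./getCustomConfig');

// Minimal usage sketch: callers treat a null result as "no custom config".
async function listCustomEndpointNames() {
  const customConfig = await getCustomConfig();
  if (!customConfig) {
    return []; // no librechat.yaml found, or it failed validation
  }
  return (customConfig.endpoints?.custom ?? []).map((endpoint) => endpoint.name);
}
```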

View file

@@ -1,4 +1,5 @@
 const { config } = require('./EndpointService');
+const getCustomConfig = require('./getCustomConfig');
 const loadCustomConfig = require('./loadCustomConfig');
 const loadConfigModels = require('./loadConfigModels');
 const loadDefaultModels = require('./loadDefaultModels');
@@ -9,6 +10,7 @@ const loadDefaultEndpointsConfig = require('./loadDefaultEConfig');

 module.exports = {
   config,
+  getCustomConfig,
   loadCustomConfig,
   loadConfigModels,
   loadDefaultModels,
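
With `getCustomConfig` re-exported above, consumers elsewhere in the API can import it from the consolidated Config service; the alias path below is an assumption based on the `~/server` aliases used in these files and the commit message's `server/services/Config` move:

```js
// Assumed consumer-side import; the Config index above re-exports getCustomConfig.
const { getCustomConfig } = require('~/server/services/Config');
```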

View file

@@ -1,18 +1,12 @@
-const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
+const { EModelEndpoint } = require('librechat-data-provider');
 const { isUserProvided, extractEnvVariable } = require('~/server/utils');
-const loadCustomConfig = require('./loadCustomConfig');
-const { getLogStores } = require('~/cache');
+const getCustomConfig = require('./getCustomConfig');

 /**
  * Load config endpoints from the cached configuration object
  * @function loadConfigEndpoints */
 async function loadConfigEndpoints() {
-  const cache = getLogStores(CacheKeys.CONFIG_STORE);
-  let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG);
-
-  if (!customConfig) {
-    customConfig = await loadCustomConfig();
-  }
+  const customConfig = await getCustomConfig();

   if (!customConfig) {
     return {};

View file

@@ -1,19 +1,15 @@
-const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
+const { EModelEndpoint } = require('librechat-data-provider');
 const { isUserProvided, extractEnvVariable } = require('~/server/utils');
 const { fetchModels } = require('~/server/services/ModelService');
-const loadCustomConfig = require('./loadCustomConfig');
-const { getLogStores } = require('~/cache');
+const getCustomConfig = require('./getCustomConfig');

 /**
  * Load config endpoints from the cached configuration object
- * @function loadConfigModels */
-async function loadConfigModels() {
-  const cache = getLogStores(CacheKeys.CONFIG_STORE);
-  let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG);
-
-  if (!customConfig) {
-    customConfig = await loadCustomConfig();
-  }
+ * @function loadConfigModels
+ * @param {Express.Request} req - The Express request object.
+ */
+async function loadConfigModels(req) {
+  const customConfig = await getCustomConfig();

   if (!customConfig) {
     return {};
@@ -49,7 +45,14 @@ async function loadConfigModels() {
     if (models.fetch && !isUserProvided(API_KEY) && !isUserProvided(BASE_URL)) {
       fetchPromisesMap[BASE_URL] =
-        fetchPromisesMap[BASE_URL] || fetchModels({ baseURL: BASE_URL, apiKey: API_KEY, name });
+        fetchPromisesMap[BASE_URL] ||
+        fetchModels({
+          user: req.user.id,
+          baseURL: BASE_URL,
+          apiKey: API_KEY,
+          name,
+          userIdQuery: models.userIdQuery,
+        });
       baseUrlToNameMap[BASE_URL] = baseUrlToNameMap[BASE_URL] || [];
       baseUrlToNameMap[BASE_URL].push(name);
       continue;
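
The `ModelService` changes themselves are not part of this hunk, but per the commit messages, `fetchModels` presumably appends the user ID to the models request when `userIdQuery` is enabled. A rough sketch of that intent only; the query parameter name, URL construction, and error handling here are assumptions, not LibreChat's actual implementation:

```js
const axios = require('axios');

// Rough sketch: illustrates what `userIdQuery` is for, not the real ModelService code.
async function fetchModels({ user, baseURL, apiKey, name, userIdQuery = false }) {
  let url = `${baseURL}/models`;
  if (userIdQuery && user) {
    // Send the LibreChat user ID as a query param, e.g. .../models?user=<id>
    url += `?user=${encodeURIComponent(user)}`;
  }
  try {
    const { data } = await axios.get(url, {
      headers: { Authorization: `Bearer ${apiKey}` },
    });
    return data?.data?.map((model) => model.id) ?? [];
  } catch (error) {
    // Errors are swallowed here; the `name` endpoint can fall back to default models instead.
    return [];
  }
}
```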

View file

@@ -17,6 +17,7 @@ const configPath = path.resolve(projectRoot, 'librechat.yaml');
 async function loadCustomConfig() {
   const customConfig = loadYaml(configPath);
   if (!customConfig) {
+    logger.info('Custom config file missing or YAML format invalid.');
     return null;
   }
@@ -25,7 +26,7 @@ async function loadCustomConfig() {
     logger.error(`Invalid custom config file at ${configPath}`, result.error);
     return null;
   } else {
-    logger.info('Loaded custom config file:');
+    logger.info('Custom config file loaded:');
     logger.info(JSON.stringify(customConfig, null, 2));
   }

View file

@@ -11,13 +11,23 @@ const fitlerAssistantModels = (str) => {
   return /gpt-4|gpt-3\\.5/i.test(str) && !/vision|instruct/i.test(str);
 };

-async function loadDefaultModels() {
+/**
+ * Loads the default models for the application.
+ * @async
+ * @function
+ * @param {Express.Request} req - The Express request object.
+ */
+async function loadDefaultModels(req) {
   const google = getGoogleModels();
-  const openAI = await getOpenAIModels();
+  const openAI = await getOpenAIModels({ user: req.user.id });
   const anthropic = getAnthropicModels();
   const chatGPTBrowser = getChatGPTBrowserModels();
-  const azureOpenAI = await getOpenAIModels({ azure: true });
-  const gptPlugins = await getOpenAIModels({ azure: useAzurePlugins, plugins: true });
+  const azureOpenAI = await getOpenAIModels({ user: req.user.id, azure: true });
+  const gptPlugins = await getOpenAIModels({
+    user: req.user.id,
+    azure: useAzurePlugins,
+    plugins: true,
+  });

   return {
     [EModelEndpoint.openAI]: openAI,