mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-17 00:40:14 +01:00
refactor: Speed up Config fetching and Setup Config Groundwork 👷🚧 (#1297)
* refactor: move endpoint services to own directory * refactor: make endpointconfig handling more concise, separate logic, and cache result for subsequent serving * refactor: ModelController gets same treatment as EndpointController, draft OverrideController * wip: flesh out override controller more to return real value * refactor: client/api changes in anticipation of override
This commit is contained in:
parent
9b2359fc27
commit
0bae503a0a
27 changed files with 405 additions and 138 deletions
|
|
@@ -1,95 +1,17 @@
|
|||
const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
|
||||
const { availableTools } = require('~/app/clients/tools');
|
||||
const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
|
||||
const {
|
||||
openAIApiKey,
|
||||
azureOpenAIApiKey,
|
||||
useAzurePlugins,
|
||||
userProvidedOpenAI,
|
||||
palmKey,
|
||||
openAI,
|
||||
// assistant,
|
||||
azureOpenAI,
|
||||
bingAI,
|
||||
chatGPTBrowser,
|
||||
anthropic,
|
||||
} = require('~/server/services/EndpointService').config;
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { CacheKeys } = require('~/common/enums');
|
||||
const { loadDefaultEndpointsConfig } = require('~/server/services/Config');
|
||||
|
||||
/**
 * GET handler for the endpoints configuration.
 *
 * Serves the default endpoints config from the CONFIG cache when present;
 * otherwise computes it via `loadDefaultEndpointsConfig`, stores it under
 * `CacheKeys.DEFAULT_CONFIG`, and sends it to the client.
 *
 * @param {object} req - Express request (unused).
 * @param {object} res - Express response.
 */
async function endpointController(req, res) {
  const cache = getLogStores(CacheKeys.CONFIG);
  const config = await cache.get(CacheKeys.DEFAULT_CONFIG);
  if (config) {
    // Cache hit: send the stored value as-is.
    // NOTE(review): assumes the cache store returns a value `res.send`
    // accepts directly — confirm the store's serialization behavior.
    res.send(config);
    return;
  }
  const defaultConfig = await loadDefaultEndpointsConfig();
  await cache.set(CacheKeys.DEFAULT_CONFIG, defaultConfig);
  res.send(JSON.stringify(defaultConfig));
}
|
||||
|
||||
module.exports = endpointController;
|
||||
|
|
|
|||
|
|
@@ -1,35 +1,17 @@
|
|||
const { EModelEndpoint } = require('../routes/endpoints/schemas');
|
||||
const {
|
||||
getOpenAIModels,
|
||||
getChatGPTBrowserModels,
|
||||
getAnthropicModels,
|
||||
} = require('../services/ModelService');
|
||||
|
||||
const { useAzurePlugins } = require('../services/EndpointService').config;
|
||||
|
||||
/**
 * Predicate for models usable with the Assistants endpoint:
 * GPT-4 / GPT-3.5 chat models, excluding vision and instruct variants.
 *
 * @param {string} str - Model name to test.
 * @returns {boolean} True if the model should be offered for assistants.
 */
const fitlerAssistantModels = (str) => {
  // Fix: `\\.` matched a literal backslash followed by any character, so
  // names like "gpt-3.5-turbo" never matched; `\.` matches the dot itself.
  // NOTE(review): identifier keeps the upstream "fitler" typo so the
  // existing call site in this file still resolves.
  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
};
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { CacheKeys } = require('~/common/enums');
|
||||
const { loadDefaultModels } = require('~/server/services/Config');
|
||||
|
||||
/**
 * GET handler for the models configuration.
 *
 * Serves the models config from the CONFIG cache when present; otherwise
 * builds it via `loadDefaultModels`, stores it under
 * `CacheKeys.MODELS_CONFIG`, and sends it to the client.
 *
 * @param {object} req - Express request (unused).
 * @param {object} res - Express response.
 */
async function modelController(req, res) {
  const cache = getLogStores(CacheKeys.CONFIG);
  let modelConfig = await cache.get(CacheKeys.MODELS_CONFIG);
  if (modelConfig) {
    // Cache hit: serve the previously built config.
    res.send(modelConfig);
    return;
  }
  modelConfig = await loadDefaultModels();
  await cache.set(CacheKeys.MODELS_CONFIG, modelConfig);
  res.send(modelConfig);
}
|
||||
|
||||
module.exports = modelController;
|
||||
|
|
|
|||
27
api/server/controllers/OverrideController.js
Normal file
27
api/server/controllers/OverrideController.js
Normal file
|
|
@@ -0,0 +1,27 @@
|
|||
const { getLogStores } = require('~/cache');
|
||||
const { CacheKeys } = require('~/common/enums');
|
||||
const { loadOverrideConfig } = require('~/server/services/Config');
|
||||
|
||||
/**
 * GET handler for the override configuration.
 *
 * Serves the override config from the CONFIG cache when present; a cached
 * `false` means "no override configured" and is sent back directly.
 * On a cache miss, loads the override config, seeds the endpoints/models
 * caches from it when those sections exist, caches the result, and sends it.
 *
 * @param {object} req - Express request (unused).
 * @param {object} res - Express response.
 */
async function overrideController(req, res) {
  const cache = getLogStores(CacheKeys.CONFIG);
  let overrideConfig = await cache.get(CacheKeys.OVERRIDE_CONFIG);
  if (overrideConfig) {
    res.send(overrideConfig);
    return;
  }
  if (overrideConfig === false) {
    // Previously determined there is no override; short-circuit.
    res.send(false);
    return;
  }
  overrideConfig = await loadOverrideConfig();
  if (!overrideConfig) {
    // Guard: destructuring a nullish result below would throw a TypeError.
    // Cache `false` so later requests take the short-circuit branch above.
    await cache.set(CacheKeys.OVERRIDE_CONFIG, false);
    res.send(false);
    return;
  }
  const { endpointsConfig, modelsConfig } = overrideConfig;
  if (endpointsConfig) {
    await cache.set(CacheKeys.DEFAULT_CONFIG, endpointsConfig);
  }
  if (modelsConfig) {
    await cache.set(CacheKeys.MODELS_CONFIG, modelsConfig);
  }
  await cache.set(CacheKeys.OVERRIDE_CONFIG, overrideConfig);
  res.send(JSON.stringify(overrideConfig));
}
|
||||
|
||||
module.exports = overrideController;
|
||||
Loading…
Add table
Add a link
Reference in a new issue