WIP: app.locals refactoring

WIP: appConfig

fix: update memory configuration retrieval to use getAppConfig based on user role

fix: update comment for AppConfig interface to clarify purpose
Danny Avila 2025-08-05 18:09:25 -04:00
parent 5a14ee9c6a
commit b992fed16c
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
66 changed files with 706 additions and 366 deletions
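
All of the diffs below apply the same migration: endpoint configuration that was previously read from the shared req.app.locals object is now resolved per request with getAppConfig, passing the requesting user's role. The following is an illustrative sketch of that before/after pattern, not code from the commit; it uses the anthropic endpoint and the streamRate fields that appear in the hunks below, and the shape of the returned config object is otherwise assumed.

// Illustrative sketch only (not part of the commit): the old app.locals lookup
// versus the new role-scoped getAppConfig call used throughout the diffs below.
const { EModelEndpoint } = require('librechat-data-provider');
const { getAppConfig } = require('~/server/services/Config');

async function resolveStreamRate(req) {
  // Before: const anthropicConfig = req.app.locals[EModelEndpoint.anthropic];
  const appConfig = await getAppConfig({ role: req.user?.role });

  let streamRate;
  const anthropicConfig = appConfig[EModelEndpoint.anthropic];
  if (anthropicConfig) {
    streamRate = anthropicConfig.streamRate;
  }

  // As in the hunks below, a global "all" entry overrides the per-endpoint value.
  const allConfig = appConfig.all;
  if (allConfig) {
    streamRate = allConfig.streamRate;
  }
  return streamRate;
}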

View file

@@ -16,6 +16,7 @@ const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getProviderConfig } = require('~/server/services/Endpoints');
const { processFiles } = require('~/server/services/Files/process');
const { getFiles, getToolFilesByIds } = require('~/models/File');
const { getAppConfig } = require('~/server/services/Config');
const { getConvoFiles } = require('~/models/Conversation');
const { getModelMaxTokens } = require('~/utils');
@@ -43,6 +44,7 @@ const initializeAgent = async ({
allowedProviders,
isInitialAgent = false,
}) => {
const appConfig = await getAppConfig({ role: req.user?.role });
if (
isAgentsEndpoint(endpointOption?.endpoint) &&
allowedProviders.size > 0 &&
@@ -84,10 +86,11 @@ const initializeAgent = async ({
const { attachments, tool_resources } = await primeResources({
req,
getFiles,
appConfig,
agentId: agent.id,
attachments: currentFiles,
tool_resources: agent.tool_resources,
requestFileSet: new Set(requestFiles?.map((file) => file.file_id)),
agentId: agent.id,
});
const provider = agent.provider;

View file

@@ -11,9 +11,9 @@ const {
createToolEndCallback,
getDefaultHandlers,
} = require('~/server/controllers/agents/callbacks');
const { getCustomEndpointConfig, getAppConfig } = require('~/server/services/Config');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getAgent } = require('~/models/Agent');
@@ -50,6 +50,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
if (!endpointOption) {
throw new Error('Endpoint option not provided');
}
const appConfig = await getAppConfig({ role: req.user?.role });
// TODO: use endpointOption to determine options/modelOptions
/** @type {Array<UsageMetadata>} */
@@ -90,7 +91,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
const agentConfigs = new Map();
/** @type {Set<string>} */
const allowedProviders = new Set(req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders);
const allowedProviders = new Set(appConfig?.[EModelEndpoint.agents]?.allowedProviders);
const loadTools = createToolLoader();
/** @type {Array<MongoFile>} */
@@ -144,7 +145,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
}
}
let endpointConfig = req.app.locals[primaryConfig.endpoint];
let endpointConfig = appConfig[primaryConfig.endpoint];
if (!isAgentsEndpoint(primaryConfig.endpoint) && !endpointConfig) {
try {
endpointConfig = await getCustomEndpointConfig(primaryConfig.endpoint);

View file

@@ -2,8 +2,10 @@ const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
const AnthropicClient = require('~/app/clients/AnthropicClient');
const { getAppConfig } = require('~/server/services/Config');
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
const appConfig = await getAppConfig({ role: req.user?.role });
const { ANTHROPIC_API_KEY, ANTHROPIC_REVERSE_PROXY, PROXY } = process.env;
const expiresAt = req.body.key;
const isUserProvided = ANTHROPIC_API_KEY === 'user_provided';
@@ -23,7 +25,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
let clientOptions = {};
/** @type {undefined | TBaseEndpoint} */
const anthropicConfig = req.app.locals[EModelEndpoint.anthropic];
const anthropicConfig = appConfig[EModelEndpoint.anthropic];
if (anthropicConfig) {
clientOptions.streamRate = anthropicConfig.streamRate;
@@ -31,7 +33,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
}
/** @type {undefined | TBaseEndpoint} */
const allConfig = req.app.locals.all;
const allConfig = appConfig.all;
if (allConfig) {
clientOptions.streamRate = allConfig.streamRate;
}

View file

@@ -7,6 +7,7 @@ const {
getUserKeyValues,
getUserKeyExpiry,
} = require('~/server/services/UserService');
const { getAppConfig } = require('~/server/services/Config');
const OAIClient = require('~/app/clients/OpenAIClient');
class Files {
@@ -48,6 +49,7 @@
}
const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => {
const appConfig = await getAppConfig({ role: req.user?.role });
const { PROXY, OPENAI_ORGANIZATION, AZURE_ASSISTANTS_API_KEY, AZURE_ASSISTANTS_BASE_URL } =
process.env;
@@ -81,7 +83,7 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
};
/** @type {TAzureConfig | undefined} */
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
const azureConfig = appConfig[EModelEndpoint.azureOpenAI];
/** @type {AzureOptions | undefined} */
let azureOptions;

View file

@@ -9,8 +9,10 @@ const {
removeNullishValues,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getAppConfig } = require('~/server/services/Config');
const getOptions = async ({ req, overrideModel, endpointOption }) => {
const appConfig = await getAppConfig({ role: req.user?.role });
const {
BEDROCK_AWS_SECRET_ACCESS_KEY,
BEDROCK_AWS_ACCESS_KEY_ID,
@@ -50,14 +52,14 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
let streamRate = Constants.DEFAULT_STREAM_RATE;
/** @type {undefined | TBaseEndpoint} */
const bedrockConfig = req.app.locals[EModelEndpoint.bedrock];
const bedrockConfig = appConfig[EModelEndpoint.bedrock];
if (bedrockConfig && bedrockConfig.streamRate) {
streamRate = bedrockConfig.streamRate;
}
/** @type {undefined | TBaseEndpoint} */
const allConfig = req.app.locals.all;
const allConfig = appConfig.all;
if (allConfig && allConfig.streamRate) {
streamRate = allConfig.streamRate;
}

View file

@@ -8,7 +8,7 @@ const {
const { Providers } = require('@librechat/agents');
const { getOpenAIConfig, createHandleLLMNewToken, resolveHeaders } = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { getCustomEndpointConfig, getAppConfig } = require('~/server/services/Config');
const { fetchModels } = require('~/server/services/ModelService');
const OpenAIClient = require('~/app/clients/OpenAIClient');
const { isUserProvided } = require('~/server/utils');
@@ -17,6 +17,7 @@ const getLogStores = require('~/cache/getLogStores');
const { PROXY } = process.env;
const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrideEndpoint }) => {
const appConfig = await getAppConfig({ role: req.user?.role });
const { key: expiresAt } = req.body;
const endpoint = overrideEndpoint ?? req.body.endpoint;
@@ -118,7 +119,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
};
/** @type {undefined | TBaseEndpoint} */
const allConfig = req.app.locals.all;
const allConfig = appConfig.all;
if (allConfig) {
customOptions.streamRate = allConfig.streamRate;
}

View file

@@ -2,6 +2,7 @@ const path = require('path');
const { EModelEndpoint, AuthKeys } = require('librechat-data-provider');
const { getGoogleConfig, isEnabled, loadServiceKey } = require('@librechat/api');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getAppConfig } = require('~/server/services/Config');
const { GoogleClient } = require('~/app');
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
@@ -46,10 +47,11 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
let clientOptions = {};
const appConfig = await getAppConfig({ role: req.user?.role });
/** @type {undefined | TBaseEndpoint} */
const allConfig = req.app.locals.all;
const allConfig = appConfig.all;
/** @type {undefined | TBaseEndpoint} */
const googleConfig = req.app.locals[EModelEndpoint.google];
const googleConfig = appConfig[EModelEndpoint.google];
if (googleConfig) {
clientOptions.streamRate = googleConfig.streamRate;

View file

@@ -1,7 +1,8 @@
const { isEnabled } = require('@librechat/api');
const { EModelEndpoint, CacheKeys, Constants, googleSettings } = require('librechat-data-provider');
const { getAppConfig } = require('~/server/services/Config');
const getLogStores = require('~/cache/getLogStores');
const initializeClient = require('./initialize');
const { isEnabled } = require('~/server/utils');
const { saveConvo } = require('~/models');
const addTitle = async (req, { text, response, client }) => {
@@ -14,7 +15,8 @@ const addTitle = async (req, { text, response, client }) => {
return;
}
const { GOOGLE_TITLE_MODEL } = process.env ?? {};
const providerConfig = req.app.locals[EModelEndpoint.google];
const appConfig = await getAppConfig({ role: req.user?.role });
const providerConfig = appConfig[EModelEndpoint.google];
let model =
providerConfig?.titleModel ??
GOOGLE_TITLE_MODEL ??

View file

@@ -8,6 +8,7 @@ const {
createHandleLLMNewToken,
} = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getAppConfig } = require('~/server/services/Config');
const OpenAIClient = require('~/app/clients/OpenAIClient');
const initializeClient = async ({
@@ -18,6 +19,7 @@ const initializeClient = async ({
overrideEndpoint,
overrideModel,
}) => {
const appConfig = await getAppConfig({ role: req.user?.role });
const {
PROXY,
OPENAI_API_KEY,
@@ -64,7 +66,7 @@ const initializeClient = async ({
const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
/** @type {false | TAzureConfig} */
const azureConfig = isAzureOpenAI && req.app.locals[EModelEndpoint.azureOpenAI];
const azureConfig = isAzureOpenAI && appConfig[EModelEndpoint.azureOpenAI];
let serverless = false;
if (isAzureOpenAI && azureConfig) {
const { modelGroupMap, groupMap } = azureConfig;
@@ -113,7 +115,7 @@ const initializeClient = async ({
}
/** @type {undefined | TBaseEndpoint} */
const openAIConfig = req.app.locals[EModelEndpoint.openAI];
const openAIConfig = appConfig[EModelEndpoint.openAI];
if (!isAzureOpenAI && openAIConfig) {
clientOptions.streamRate = openAIConfig.streamRate;
@@ -121,7 +123,7 @@ const initializeClient = async ({
}
/** @type {undefined | TBaseEndpoint} */
const allConfig = req.app.locals.all;
const allConfig = appConfig.all;
if (allConfig) {
clientOptions.streamRate = allConfig.streamRate;
}
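
Every call site in this commit awaits getAppConfig({ role: req.user?.role }) and indexes the result by endpoint name, plus an all key. The implementation of getAppConfig itself is changed elsewhere among the 66 files and is not shown in these hunks; the following is a purely hypothetical sketch of a role-aware, cached accessor that would be consistent with these call sites, with placeholder names for anything not visible in the diff.

// Hypothetical sketch only; the real getAppConfig lives in
// ~/server/services/Config and its internals are not shown in these hunks.
const configCache = new Map();

// Placeholder for however the project actually builds the per-role,
// endpoint-keyed configuration (e.g. base config plus role overrides).
async function buildConfigForRole(role) {
  return {
    all: { streamRate: 1 },
    // [EModelEndpoint.anthropic]: { streamRate: 25 }, etc.
  };
}

async function getAppConfig({ role } = {}) {
  const key = role ?? 'default';
  if (!configCache.has(key)) {
    configCache.set(key, await buildConfigForRole(role));
  }
  return configCache.get(key);
}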