📦 refactor: Consolidate DB models, encapsulating Mongoose usage in data-schemas (#11830)

* chore: move database model methods to /packages/data-schemas

* chore: add TypeScript ESLint rule to warn on unused variables
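
For reference, the rule being enabled is `@typescript-eslint/no-unused-vars` at `warn` severity. A minimal flat-config sketch is below; the file name and the ignore-pattern options are illustrative assumptions, not copied from the repository config.

```ts
// eslint.config.ts (hypothetical excerpt) — only the rule name and "warn"
// severity come from this commit; the option values are assumed for illustration.
import tseslint from 'typescript-eslint';

export default tseslint.config({
  rules: {
    '@typescript-eslint/no-unused-vars': [
      'warn',
      { argsIgnorePattern: '^_', varsIgnorePattern: '^_' },
    ],
  },
});
```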

* refactor: model imports to streamline access

- Consolidated model imports across various files to improve code organization and reduce redundancy.
- Updated imports for models such as Assistant, Message, Conversation, and others to a unified import path.
- Adjusted middleware and service files to use the new import structure, with no change in behavior.
- Updated test files to use the new import paths, keeping existing coverage intact.
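
The consolidation amounts to a barrel-style re-export: model methods are exposed from one entry point (`~/models`, backed by `packages/data-schemas`) instead of per-model files. A minimal sketch, assuming hypothetical file names for the barrel — the method names themselves appear in the diff below:

```ts
// Illustrative barrel module; the real export list and file layout live in
// the consolidated ~/models / packages/data-schemas code, not here.
export { getActions, deleteActions } from './action';
export { deleteAssistant, getAssistant } from './assistant';
export { saveConvo, getConvoFiles } from './conversation';
export { getMessage, getMessages, recordMessage } from './message';

// Consumers then import everything from the single unified path, e.g. (CommonJS):
// const { getActions, deleteActions, deleteAssistant } = require('~/models');
```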

* chore: migrate database models to packages/data-schemas and refactor all direct Mongoose Model usage outside of data-schemas

* test: update agent model mocks in unit tests

- Added `getAgent` mock to `client.test.js` to enhance test coverage for agent-related functionality.
- Removed redundant `getAgent` and `getAgents` mocks from `openai.spec.js` and `responses.unit.spec.js` to streamline test setup and reduce duplication.
- Ensured consistency in agent mock implementations across test files.
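
With the unified entry point, tests mock `~/models` once instead of mocking individual model files. A hedged sketch of the pattern (the fixture values are made up):

```ts
// Sketch of a consolidated Jest mock for the ~/models barrel; only the shape
// of the pattern is taken from this commit, the mock data is invented.
jest.mock('~/models', () => ({
  getAgent: jest.fn().mockResolvedValue({ id: 'agent_abc', provider: 'openai' }),
  getMessages: jest.fn().mockResolvedValue([]),
}));
```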

* fix: update types in data-schemas

* refactor: enhance type definitions in transaction and spending methods

- Updated type definitions in `checkBalance.ts` to use specific request and response types.
- Refined `spendTokens.ts` to utilize a new `SpendTxData` interface for better clarity and type safety.
- Improved transaction handling in `transaction.ts` by introducing `TransactionResult` and `TxData` interfaces, ensuring consistent data structures across methods.
- Updated unit tests in `transaction.spec.ts` to match the new type definitions (see the sketch below for the general shape).
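
As a rough sketch of what such typed transaction data might look like (field names beyond the obvious token counts are assumptions; the authoritative definitions live in `spendTokens.ts` and `transaction.ts`):

```ts
// Illustrative shapes only — the real interfaces in packages/data-schemas may differ.
export interface SpendTxData {
  user: string;
  conversationId?: string;
  model?: string;
  context?: string; // e.g. 'message' or 'title' — assumed values
}

export interface TransactionResult {
  user: string;
  rate?: number;
  /** Remaining token credits after the spend, when balance tracking is enabled. */
  balance?: number;
}

/** Signature sketch: spending methods take typed tx data instead of loose objects. */
export type SpendTokensFn = (
  txData: SpendTxData,
  tokenUsage: { promptTokens?: number; completionTokens?: number },
) => Promise<TransactionResult | undefined>;
```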

* refactor: streamline model imports and enhance code organization

- Consolidated model imports across various controllers and services to a unified import path, improving code clarity and reducing redundancy.
- Updated multiple files to reflect the new import structure, with no behavioral changes.
- Removed duplicate import statements and grouped model method imports under the shared entry point.

* feat: implement loadAddedAgent and refactor agent loading logic

- Introduced `loadAddedAgent` function to handle loading agents from added conversations, supporting multi-convo parallel execution.
- Created a new `load.ts` file to encapsulate agent loading functionalities, including `loadEphemeralAgent` and `loadAgent`.
- Updated the `index.ts` file to export the new `load` module instead of the deprecated `loadAgent`.
- Enhanced type definitions and improved error handling in the agent loading process.
- Adjusted unit tests to reflect changes in the agent loading structure and ensure comprehensive coverage.
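
The underlying pattern, visible in the diff below, is dependency injection: the loaders move into `@librechat/api` as pure functions and the API layer passes in its consolidated database methods. A minimal sketch under assumed parameter and return types (the helper name `loadAgentSketch` is hypothetical):

```ts
// Sketch of the injected-dependency shape; null handling and types are simplified.
interface AgentLoadDeps {
  getAgent: (query: { id: string }) => Promise<Record<string, unknown> | null>;
  getMCPServerTools?: (serverName: string) => Promise<Record<string, unknown> | undefined>;
}

export async function loadAgentSketch(
  params: { agent_id?: string | null },
  deps: AgentLoadDeps,
) {
  if (!params.agent_id) {
    return null; // illustrative; the real null/empty handling lives in @librechat/api
  }
  return deps.getAgent({ id: params.agent_id });
}

// The API layer then binds its db methods once, as in the diff below:
// const loadAgent = (params) => loadAgentFn(params, { getAgent: db.getAgent, getMCPServerTools });
```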

* refactor: enhance balance handling with new update interface

- Introduced `IBalanceUpdate` interface to streamline balance update operations across the codebase.
- Updated `upsertBalanceFields` method signatures in `balance.ts`, `transaction.ts`, and related tests to utilize the new interface for improved type safety.
- Adjusted type imports in `balance.spec.ts` to include `IBalanceUpdate`, ensuring consistency in balance management functionalities.
- Enhanced overall code clarity and maintainability by refining type definitions related to balance operations.
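
A hedged sketch of what such an update interface might cover; the field names below are assumptions for illustration, not the actual `IBalanceUpdate` definition:

```ts
// Illustrative only — the real IBalanceUpdate lives in packages/data-schemas.
export interface IBalanceUpdateSketch {
  tokenCredits?: number;
  autoRefillEnabled?: boolean;
  lastRefill?: Date;
}

/** Signature sketch for the upsert helper that accepts the typed update. */
export type UpsertBalanceFieldsFn = (
  userId: string,
  fields: IBalanceUpdateSketch,
) => Promise<IBalanceUpdateSketch | null>;
```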

* feat: add unit tests for loadAgent functionality and enhance agent loading logic

- Introduced comprehensive unit tests for the `loadAgent` function, covering various scenarios including null and empty agent IDs, loading of ephemeral agents, and permission checks.
- Moved `getConvoFiles` to its correct position in the database method exports passed to `initializeClient`.
- Improved test coverage for agent loading, including handling of non-existent agents and user permissions.
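
A sketch of the kind of test added, reusing the `loadAgentSketch` helper and Jest globals from the sketch above; the assertions and fixtures are illustrative, not copied from the suite:

```ts
describe('loadAgent (sketch)', () => {
  const getAgent = jest.fn();

  it('returns nothing for a null or empty agent id', async () => {
    await expect(loadAgentSketch({ agent_id: null }, { getAgent })).resolves.toBeNull();
    await expect(loadAgentSketch({ agent_id: '' }, { getAgent })).resolves.toBeNull();
    expect(getAgent).not.toHaveBeenCalled();
  });

  it('delegates lookup of existing agents to the injected getAgent', async () => {
    getAgent.mockResolvedValueOnce({ id: 'agent_123' });
    await expect(
      loadAgentSketch({ agent_id: 'agent_123' }, { getAgent }),
    ).resolves.toEqual({ id: 'agent_123' });
  });
});
```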

* chore: reorder memory method exports for consistency

- Moved `deleteAllUserMemories` to the correct position in the exported memory methods, ensuring a consistent and logical order of method exports in `memory.ts`.
Danny Avila 2026-02-17 18:23:44 -05:00
parent a85e99ff45
commit a6fb257bcf
182 changed files with 8548 additions and 8105 deletions

View file

@ -20,9 +20,14 @@ const {
isImageVisionTool,
actionDomainSeparator,
} = require('librechat-data-provider');
const { findToken, updateToken, createToken } = require('~/models');
const { getActions, deleteActions } = require('~/models/Action');
const { deleteAssistant } = require('~/models/Assistant');
const {
findToken,
updateToken,
createToken,
getActions,
deleteActions,
deleteAssistant,
} = require('~/models');
const { getFlowStateManager } = require('~/config');
const { getLogStores } = require('~/cache');

View file

@ -1,12 +1,15 @@
const { logger } = require('@librechat/data-schemas');
const { initializeAgent, validateAgentModel } = require('@librechat/api');
const { loadAddedAgent, setGetAgent, ADDED_AGENT_ID } = require('~/models/loadAddedAgent');
const { getConvoFiles } = require('~/models/Conversation');
const { getAgent } = require('~/models/Agent');
const {
ADDED_AGENT_ID,
initializeAgent,
validateAgentModel,
loadAddedAgent: loadAddedAgentFn,
} = require('@librechat/api');
const { getMCPServerTools } = require('~/server/services/Config');
const db = require('~/models');
// Initialize the getAgent dependency
setGetAgent(getAgent);
const loadAddedAgent = (params) =>
loadAddedAgentFn(params, { getAgent: db.getAgent, getMCPServerTools });
/**
* Process addedConvo for parallel agent execution.
@ -99,10 +102,10 @@ const processAddedConvo = async ({
allowedProviders,
},
{
getConvoFiles,
getFiles: db.getFiles,
getUserKey: db.getUserKey,
getMessages: db.getMessages,
getConvoFiles: db.getConvoFiles,
updateFilesUsage: db.updateFilesUsage,
getUserCodeFiles: db.getUserCodeFiles,
getUserKeyValues: db.getUserKeyValues,

View file

@ -1,6 +1,10 @@
const { logger } = require('@librechat/data-schemas');
const { loadAgent: loadAgentFn } = require('@librechat/api');
const { isAgentsEndpoint, removeNullishValues, Constants } = require('librechat-data-provider');
const { loadAgent } = require('~/models/Agent');
const { getMCPServerTools } = require('~/server/services/Config');
const db = require('~/models');
const loadAgent = (params) => loadAgentFn(params, { getAgent: db.getAgent, getMCPServerTools });
const buildOptions = (req, endpoint, parsedBody, endpointType) => {
const { spec, iconURL, agent_id, ...model_parameters } = parsedBody;

View file

@ -22,9 +22,7 @@ const {
const { loadAgentTools, loadToolsForExecution } = require('~/server/services/ToolService');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const AgentClient = require('~/server/controllers/agents/client');
const { getConvoFiles } = require('~/models/Conversation');
const { processAddedConvo } = require('./addedConvo');
const { getAgent } = require('~/models/Agent');
const { logViolation } = require('~/cache');
const db = require('~/models');
@ -191,10 +189,10 @@ const initializeClient = async ({ req, res, signal, endpointOption }) => {
isInitialAgent: true,
},
{
getConvoFiles,
getFiles: db.getFiles,
getUserKey: db.getUserKey,
getMessages: db.getMessages,
getConvoFiles: db.getConvoFiles,
updateFilesUsage: db.updateFilesUsage,
getUserKeyValues: db.getUserKeyValues,
getUserCodeFiles: db.getUserCodeFiles,
@ -226,7 +224,7 @@ const initializeClient = async ({ req, res, signal, endpointOption }) => {
const skippedAgentIds = new Set();
async function processAgent(agentId) {
const agent = await getAgent({ id: agentId });
const agent = await db.getAgent({ id: agentId });
if (!agent) {
logger.warn(
`[processAgent] Handoff agent ${agentId} not found, skipping (orphaned reference)`,
@ -260,10 +258,10 @@ const initializeClient = async ({ req, res, signal, endpointOption }) => {
allowedProviders,
},
{
getConvoFiles,
getFiles: db.getFiles,
getUserKey: db.getUserKey,
getMessages: db.getMessages,
getConvoFiles: db.getConvoFiles,
updateFilesUsage: db.updateFilesUsage,
getUserKeyValues: db.getUserKeyValues,
getUserCodeFiles: db.getUserCodeFiles,

View file

@ -66,7 +66,11 @@ const addTitle = async (req, { text, response, client }) => {
await titleCache.set(key, title, 120000);
await saveConvo(
req,
{
userId: req?.user?.id,
isTemporary: req?.body?.isTemporary,
interfaceConfig: req?.config?.interfaceConfig,
},
{
conversationId: response.conversationId,
title,

View file

@ -1,6 +1,6 @@
const { removeNullishValues } = require('librechat-data-provider');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getAssistant } = require('~/models/Assistant');
const { getAssistant } = require('~/models');
const buildOptions = async (endpoint, parsedBody) => {
const { promptPrefix, assistant_id, iconURL, greeting, spec, artifacts, ...modelOptions } =

View file

@ -1,9 +1,9 @@
const { isEnabled, sanitizeTitle } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const { saveConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const initializeClient = require('./initalize');
const { saveConvo } = require('~/models');
/**
* Generates a conversation title using OpenAI SDK
@ -63,8 +63,13 @@ const addTitle = async (req, { text, responseText, conversationId }) => {
const title = await generateTitle({ openai, text, responseText });
await titleCache.set(key, title, 120000);
const reqCtx = {
userId: req?.user?.id,
isTemporary: req?.body?.isTemporary,
interfaceConfig: req?.config?.interfaceConfig,
};
await saveConvo(
req,
reqCtx,
{
conversationId,
title,
@ -76,7 +81,11 @@ const addTitle = async (req, { text, responseText, conversationId }) => {
const fallbackTitle = text.length > 40 ? text.substring(0, 37) + '...' : text;
await titleCache.set(key, fallbackTitle, 120000);
await saveConvo(
req,
{
userId: req?.user?.id,
isTemporary: req?.body?.isTemporary,
interfaceConfig: req?.config?.interfaceConfig,
},
{
conversationId,
title: fallbackTitle,

View file

@ -1,6 +1,6 @@
const { removeNullishValues } = require('librechat-data-provider');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getAssistant } = require('~/models/Assistant');
const { getAssistant } = require('~/models');
const buildOptions = async (endpoint, parsedBody) => {
const { promptPrefix, assistant_id, iconURL, greeting, spec, artifacts, ...modelOptions } =

View file

@ -5,8 +5,8 @@ const {
parseTextParts,
findLastSeparatorIndex,
} = require('librechat-data-provider');
const { getMessage } = require('~/models/Message');
const { getLogStores } = require('~/cache');
const { getMessage } = require('~/models');
/**
* @param {string[]} voiceIds - Array of voice IDs

View file

@ -3,7 +3,7 @@ const { createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
jest.mock('keyv');
const globalCache = {};
jest.mock('~/models/Message', () => {
jest.mock('~/models', () => {
return {
getMessage: jest.fn().mockImplementation((messageId) => {
return globalCache[messageId] || null;

View file

@ -8,8 +8,7 @@ const {
EModelEndpoint,
PermissionTypes,
} = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role');
const { Files } = require('~/models');
const { getRoleByName, getFiles } = require('~/models');
/**
* Process file search results from tool calls
@ -127,7 +126,7 @@ async function enhanceSourcesWithMetadata(sources, appConfig) {
let fileMetadataMap = {};
try {
const files = await Files.find({ file_id: { $in: fileIds } });
const files = await getFiles({ file_id: { $in: fileIds } });
fileMetadataMap = files.reduce((map, file) => {
map[file.file_id] = file;
return map;

View file

@ -1,7 +1,7 @@
const { logger } = require('@librechat/data-schemas');
const { PermissionBits, ResourceType } = require('librechat-data-provider');
const { checkPermission } = require('~/server/services/PermissionService');
const { getAgent } = require('~/models/Agent');
const { getAgent } = require('~/models');
/**
* Checks if a user has access to multiple files through a shared agent (batch operation)

View file

@ -26,16 +26,15 @@ const {
resizeImageBuffer,
} = require('~/server/services/Files/images');
const { addResourceFileId, deleteResourceFileId } = require('~/server/controllers/assistants/v2');
const { addAgentResourceFile, removeAgentResourceFiles } = require('~/models/Agent');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { createFile, updateFileUsage, deleteFiles } = require('~/models');
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
const { checkCapability } = require('~/server/services/Config');
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
const { getStrategyFunctions } = require('./strategies');
const { determineFileType } = require('~/server/utils');
const { STTService } = require('./Audio/STTService');
const db = require('~/models');
/**
* Creates a modular file upload wrapper that ensures filename sanitization
@ -210,7 +209,7 @@ const processDeleteRequest = async ({ req, files }) => {
if (agentFiles.length > 0) {
promises.push(
removeAgentResourceFiles({
db.removeAgentResourceFiles({
agent_id: req.body.agent_id,
files: agentFiles,
}),
@ -218,7 +217,7 @@ const processDeleteRequest = async ({ req, files }) => {
}
await Promise.allSettled(promises);
await deleteFiles(resolvedFileIds);
await db.deleteFiles(resolvedFileIds);
};
/**
@ -250,7 +249,7 @@ const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath, c
dimensions = {},
} = (await saveURL({ userId, URL, fileName, basePath })) || {};
const filepath = await getFileURL({ fileName: `${userId}/${fileName}`, basePath });
return await createFile(
return await db.createFile(
{
user: userId,
file_id: v4(),
@ -296,7 +295,7 @@ const processImageFile = async ({ req, res, metadata, returnFile = false }) => {
endpoint,
});
const result = await createFile(
const result = await db.createFile(
{
user: req.user.id,
file_id,
@ -348,7 +347,7 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
}
const fileName = `${file_id}-${filename}`;
const filepath = await saveBuffer({ userId: req.user.id, fileName, buffer });
return await createFile(
return await db.createFile(
{
user: req.user.id,
file_id,
@ -434,7 +433,7 @@ const processFileUpload = async ({ req, res, metadata }) => {
filepath = result.filepath;
}
const result = await createFile(
const result = await db.createFile(
{
user: req.user.id,
file_id: id ?? file_id,
@ -538,14 +537,14 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
});
if (!messageAttachment && tool_resource) {
await addAgentResourceFile({
req,
await db.addAgentResourceFile({
file_id,
agent_id,
tool_resource,
updatingUserId: req?.user?.id,
});
}
const result = await createFile(fileInfo, true);
const result = await db.createFile(fileInfo, true);
return res
.status(200)
.json({ message: 'Agent file uploaded and processed successfully', ...result });
@ -655,11 +654,11 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
let filepath = _filepath;
if (!messageAttachment && tool_resource) {
await addAgentResourceFile({
req,
await db.addAgentResourceFile({
file_id,
agent_id,
tool_resource,
updatingUserId: req?.user?.id,
});
}
@ -690,7 +689,7 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
width,
});
const result = await createFile(fileInfo, true);
const result = await db.createFile(fileInfo, true);
res.status(200).json({ message: 'Agent file uploaded and processed successfully', ...result });
};
@ -736,10 +735,10 @@ const processOpenAIFile = async ({
};
if (saveFile) {
await createFile(file, true);
await db.createFile(file, true);
} else if (updateUsage) {
try {
await updateFileUsage({ file_id });
await db.updateFileUsage({ file_id });
} catch (error) {
logger.error('Error updating file usage', error);
}
@ -777,7 +776,7 @@ const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileEx
file_id,
filename,
};
createFile(file, true);
db.createFile(file, true);
return file;
};
@ -921,7 +920,7 @@ async function saveBase64Image(
fileName: filename,
buffer: image.buffer,
});
return await createFile(
return await db.createFile(
{
type,
source,

View file

@ -15,16 +15,22 @@ const {
getEffectivePermissionsForResources: getEffectivePermissionsForResourcesACL,
grantPermission: grantPermissionACL,
findEntriesByPrincipalsAndResource,
findRolesByResourceType,
findPublicResourceIds,
bulkWriteAclEntries,
findGroupByExternalId,
findRoleByIdentifier,
deleteAclEntries,
getUserPrincipals,
findGroupByQuery,
updateGroupById,
bulkUpdateGroups,
hasPermission,
createGroup,
createUser,
updateUser,
findUser,
} = require('~/models');
const { AclEntry, AccessRole, Group } = require('~/db/models');
/** @type {boolean|null} */
let transactionSupportCache = null;
@ -275,17 +281,9 @@ const findPubliclyAccessibleResources = async ({ resourceType, requiredPermissio
validateResourceType(resourceType);
// Find all public ACL entries where the public principal has at least the required permission bits
const entries = await AclEntry.find({
principalType: PrincipalType.PUBLIC,
resourceType,
permBits: { $bitsAllSet: requiredPermissions },
}).distinct('resourceId');
return entries;
return await findPublicResourceIds(resourceType, requiredPermissions);
} catch (error) {
logger.error(`[PermissionService.findPubliclyAccessibleResources] Error: ${error.message}`);
// Re-throw validation errors
if (error.message.includes('requiredPermissions must be')) {
throw error;
}
@ -302,7 +300,7 @@ const findPubliclyAccessibleResources = async ({ resourceType, requiredPermissio
const getAvailableRoles = async ({ resourceType }) => {
validateResourceType(resourceType);
return await AccessRole.find({ resourceType }).lean();
return await findRolesByResourceType(resourceType);
};
/**
@ -423,7 +421,7 @@ const ensureGroupPrincipalExists = async function (principal, authContext = null
let existingGroup = await findGroupByExternalId(principal.idOnTheSource, 'entra');
if (!existingGroup && principal.email) {
existingGroup = await Group.findOne({ email: principal.email.toLowerCase() }).lean();
existingGroup = await findGroupByQuery({ email: principal.email.toLowerCase() });
}
if (existingGroup) {
@ -452,7 +450,7 @@ const ensureGroupPrincipalExists = async function (principal, authContext = null
}
if (needsUpdate) {
await Group.findByIdAndUpdate(existingGroup._id, { $set: updateData }, { new: true });
await updateGroupById(existingGroup._id, updateData);
}
return existingGroup._id.toString();
@ -520,7 +518,7 @@ const syncUserEntraGroupMemberships = async (user, accessToken, session = null)
const sessionOptions = session ? { session } : {};
await Group.updateMany(
await bulkUpdateGroups(
{
idOnTheSource: { $in: allGroupIds },
source: 'entra',
@ -530,7 +528,7 @@ const syncUserEntraGroupMemberships = async (user, accessToken, session = null)
sessionOptions,
);
await Group.updateMany(
await bulkUpdateGroups(
{
source: 'entra',
memberIds: user.idOnTheSource,
@ -628,7 +626,7 @@ const bulkUpdateResourcePermissions = async ({
const sessionOptions = localSession ? { session: localSession } : {};
const roles = await AccessRole.find({ resourceType }).lean();
const roles = await findRolesByResourceType(resourceType);
const rolesMap = new Map();
roles.forEach((role) => {
rolesMap.set(role.accessRoleId, role);
@ -732,7 +730,7 @@ const bulkUpdateResourcePermissions = async ({
}
if (bulkWrites.length > 0) {
await AclEntry.bulkWrite(bulkWrites, sessionOptions);
await bulkWriteAclEntries(bulkWrites, sessionOptions);
}
const deleteQueries = [];
@ -773,12 +771,7 @@ const bulkUpdateResourcePermissions = async ({
}
if (deleteQueries.length > 0) {
await AclEntry.deleteMany(
{
$or: deleteQueries,
},
sessionOptions,
);
await deleteAclEntries({ $or: deleteQueries }, sessionOptions);
}
if (shouldEndSession && supportsTransactions) {
@ -822,7 +815,7 @@ const removeAllPermissions = async ({ resourceType, resourceId }) => {
throw new Error(`Invalid resource ID: ${resourceId}`);
}
const result = await AclEntry.deleteMany({
const result = await deleteAclEntries({
resourceType,
resourceId,
});

View file

@ -1,16 +1,15 @@
const path = require('path');
const { v4 } = require('uuid');
const { countTokens, escapeRegExp } = require('@librechat/api');
const { countTokens } = require('@librechat/api');
const { escapeRegExp } = require('@librechat/data-schemas');
const {
Constants,
ContentTypes,
AnnotationTypes,
defaultOrderQuery,
} = require('librechat-data-provider');
const { recordMessage, getMessages, spendTokens, saveConvo } = require('~/models');
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { recordMessage, getMessages } = require('~/models/Message');
const { spendTokens } = require('~/models/spendTokens');
const { saveConvo } = require('~/models/Conversation');
/**
* Initializes a new thread or adds messages to an existing thread.
@ -62,24 +61,6 @@ async function initThread({ openai, body, thread_id: _thread_id }) {
async function saveUserMessage(req, params) {
const tokenCount = await countTokens(params.text);
// todo: do this on the frontend
// const { file_ids = [] } = params;
// let content;
// if (file_ids.length) {
// content = [
// {
// value: params.text,
// },
// ...(
// file_ids
// .filter(f => f)
// .map((file_id) => ({
// file_id,
// }))
// ),
// ];
// }
const userMessage = {
user: params.user,
endpoint: params.endpoint,
@ -110,9 +91,15 @@ async function saveUserMessage(req, params) {
}
const message = await recordMessage(userMessage);
await saveConvo(req, convo, {
context: 'api/server/services/Threads/manage.js #saveUserMessage',
});
await saveConvo(
{
userId: req?.user?.id,
isTemporary: req?.body?.isTemporary,
interfaceConfig: req?.config?.interfaceConfig,
},
convo,
{ context: 'api/server/services/Threads/manage.js #saveUserMessage' },
);
return message;
}
@ -161,7 +148,11 @@ async function saveAssistantMessage(req, params) {
});
await saveConvo(
req,
{
userId: req?.user?.id,
isTemporary: req?.body?.isTemporary,
interfaceConfig: req?.config?.interfaceConfig,
},
{
endpoint: params.endpoint,
conversationId: params.conversationId,
@ -353,7 +344,11 @@ async function syncMessages({
await Promise.all(recordPromises);
await saveConvo(
openai.req,
{
userId: openai.req?.user?.id,
isTemporary: openai.req?.body?.isTemporary,
interfaceConfig: openai.req?.config?.interfaceConfig,
},
{
conversationId,
file_ids: attached_file_ids,

View file

@ -1,5 +1,5 @@
const { logger } = require('@librechat/data-schemas');
const { deleteNullOrEmptyConversations } = require('~/models/Conversation');
const { deleteNullOrEmptyConversations } = require('~/models');
const cleanup = async () => {
try {

View file

@ -6,7 +6,6 @@ const {
checkAgentPermissionsMigration,
checkPromptPermissionsMigration,
} = require('@librechat/api');
const { Agent, PromptGroup } = require('~/db/models');
const { findRoleByIdentifier } = require('~/models');
/**
@ -20,7 +19,7 @@ async function checkMigrations() {
methods: {
findRoleByIdentifier,
},
AgentModel: Agent,
AgentModel: mongoose.models.Agent,
});
logAgentMigrationWarning(agentMigrationResult);
} catch (error) {
@ -32,7 +31,7 @@ async function checkMigrations() {
methods: {
findRoleByIdentifier,
},
PromptGroupModel: PromptGroup,
PromptGroupModel: mongoose.models.PromptGroup,
});
logPromptMigrationWarning(promptMigrationResult);
} catch (error) {